Diffstat (limited to 'include')
-rw-r--r--   include/vulkan/vk_platform.h            92
-rw-r--r--   include/vulkan/vulkan.h                  79
-rw-r--r--   include/vulkan/vulkan.hpp             42554
-rw-r--r--   include/vulkan/vulkan_android.h         126
-rw-r--r--   include/vulkan/vulkan_core.h           7470
-rw-r--r--   include/vulkan/vulkan_ios.h              58
-rw-r--r--   include/vulkan/vulkan_macos.h            58
-rw-r--r--   include/vulkan/vulkan_mir.h              65
-rw-r--r--   include/vulkan/vulkan_vi.h               58
-rw-r--r--   include/vulkan/vulkan_wayland.h          65
-rw-r--r--   include/vulkan/vulkan_win32.h           276
-rw-r--r--   include/vulkan/vulkan_xcb.h              66
-rw-r--r--   include/vulkan/vulkan_xlib.h             66
-rw-r--r--   include/vulkan/vulkan_xlib_xrandr.h      54
14 files changed, 51087 insertions, 0 deletions
diff --git a/include/vulkan/vk_platform.h b/include/vulkan/vk_platform.h
new file mode 100644
index 0000000..7289299
--- /dev/null
+++ b/include/vulkan/vk_platform.h
@@ -0,0 +1,92 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright (c) 2014-2017 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+* Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan isn't supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#include <stddef.h>
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif
diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h
new file mode 100644
index 0000000..d05c849
--- /dev/null
+++ b/include/vulkan/vulkan.h
@@ -0,0 +1,79 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include "vk_platform.h"
+#include "vulkan_core.h"
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "vulkan_android.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#include "vulkan_ios.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#include "vulkan_macos.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+#include <mir_toolkit/client_types.h>
+#include "vulkan_mir.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#include "vulkan_vi.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include <wayland-client.h>
+#include "vulkan_wayland.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#include "vulkan_win32.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#include "vulkan_xcb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#include "vulkan_xlib.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+#include "vulkan_xlib_xrandr.h"
+#endif
+
+#endif // VULKAN_H_
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
new file mode 100644
index 0000000..3ea82c8
--- /dev/null
+++ b/include/vulkan/vulkan.hpp
@@ -0,0 +1,42554 @@
+// Copyright (c) 2015-2018 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#include <algorithm>
+#include <array>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <initializer_list>
+#include <string>
+#include <system_error>
+#include <tuple>
+#include <type_traits>
+#include <vulkan/vulkan.h>
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# include <memory>
+# include <vector>
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if !defined(VULKAN_HPP_ASSERT)
+# include <cassert>
+# define VULKAN_HPP_ASSERT assert
+#endif
+static_assert( VK_HEADER_VERSION == 74 , "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+# define VULKAN_HPP_TYPESAFE_CONVERSION
+# endif
+#endif
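
For context, a minimal sketch of what this switch controls; it assumes a 32-bit target, where the macro is not defined automatically (on 64-bit targets the define below is redundant), and uses vk::Instance, which is defined later in this header:

    // Opt in to implicit vk:: handle -> C handle conversions on 32-bit targets;
    // without the macro the conversion operators are 'explicit' there.
    #define VULKAN_HPP_TYPESAFE_CONVERSION
    #include <vulkan/vulkan.hpp>

    int main()
    {
      vk::Instance instance;               // default-constructed null handle wrapper
      VkInstance   raw = instance;         // implicit conversion enabled by the macro
      return raw == VK_NULL_HANDLE ? 0 : 1;
    }
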
+
+#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
+# if defined(__clang__)
+# if __has_feature(cxx_unrestricted_unions)
+# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+# endif
+# elif defined(__GNUC__)
+# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+# if 40600 <= GCC_VERSION
+# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+# endif
+# elif defined(_MSC_VER)
+# if 1900 <= _MSC_VER
+# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+# endif
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_INLINE)
+# if defined(__clang__)
+# if __has_attribute(always_inline)
+# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# else
+# define VULKAN_HPP_INLINE inline
+# endif
+# elif defined(__GNUC__)
+# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# elif defined(_MSC_VER)
+# define VULKAN_HPP_INLINE __forceinline
+# else
+# define VULKAN_HPP_INLINE inline
+# endif
+#endif
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+# define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+#if defined(_MSC_VER) && (_MSC_VER <= 1800)
+# define VULKAN_HPP_CONSTEXPR
+#else
+# define VULKAN_HPP_CONSTEXPR constexpr
+#endif
+
+
+#if !defined(VULKAN_HPP_NAMESPACE)
+#define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2(text) #text
+#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text)
+#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE)
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+ template <typename FlagBitsType> struct FlagTraits
+ {
+ enum { allFlags = 0 };
+ };
+
+ template <typename BitType, typename MaskType = VkFlags>
+ class Flags
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Flags()
+ : m_mask(0)
+ {
+ }
+
+ Flags(BitType bit)
+ : m_mask(static_cast<MaskType>(bit))
+ {
+ }
+
+ Flags(Flags<BitType> const& rhs)
+ : m_mask(rhs.m_mask)
+ {
+ }
+
+ explicit Flags(MaskType flags)
+ : m_mask(flags)
+ {
+ }
+
+ Flags<BitType> & operator=(Flags<BitType> const& rhs)
+ {
+ m_mask = rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType> & operator|=(Flags<BitType> const& rhs)
+ {
+ m_mask |= rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType> & operator&=(Flags<BitType> const& rhs)
+ {
+ m_mask &= rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType> & operator^=(Flags<BitType> const& rhs)
+ {
+ m_mask ^= rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType> operator|(Flags<BitType> const& rhs) const
+ {
+ Flags<BitType> result(*this);
+ result |= rhs;
+ return result;
+ }
+
+ Flags<BitType> operator&(Flags<BitType> const& rhs) const
+ {
+ Flags<BitType> result(*this);
+ result &= rhs;
+ return result;
+ }
+
+ Flags<BitType> operator^(Flags<BitType> const& rhs) const
+ {
+ Flags<BitType> result(*this);
+ result ^= rhs;
+ return result;
+ }
+
+ bool operator!() const
+ {
+ return !m_mask;
+ }
+
+ Flags<BitType> operator~() const
+ {
+ Flags<BitType> result(*this);
+ result.m_mask ^= FlagTraits<BitType>::allFlags;
+ return result;
+ }
+
+ bool operator==(Flags<BitType> const& rhs) const
+ {
+ return m_mask == rhs.m_mask;
+ }
+
+ bool operator!=(Flags<BitType> const& rhs) const
+ {
+ return m_mask != rhs.m_mask;
+ }
+
+ explicit operator bool() const
+ {
+ return !!m_mask;
+ }
+
+ explicit operator MaskType() const
+ {
+ return m_mask;
+ }
+
+ private:
+ MaskType m_mask;
+ };
+
+ template <typename BitType>
+ Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
+ {
+ return flags | bit;
+ }
+
+ template <typename BitType>
+ Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
+ {
+ return flags & bit;
+ }
+
+ template <typename BitType>
+ Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
+ {
+ return flags ^ bit;
+ }
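
For illustration, a short sketch of how this Flags wrapper and its free operators are used with one of the FlagBits enums defined later in this header; it assumes the Vulkan SDK headers are on the include path:

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
      // operator| on the bit enum yields a type-safe Flags value rather than a raw integer.
      vk::BufferUsageFlags usage = vk::BufferUsageFlagBits::eTransferSrc
                                 | vk::BufferUsageFlagBits::eVertexBuffer;

      // operator& plus the explicit bool conversion test an individual bit.
      if ( usage & vk::BufferUsageFlagBits::eVertexBuffer )
      {
        std::cout << "vertex buffer usage requested\n";
      }
      return 0;
    }
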
+
+
+ template <typename RefType>
+ class Optional
+ {
+ public:
+ Optional(RefType & reference) { m_ptr = &reference; }
+ Optional(RefType * ptr) { m_ptr = ptr; }
+ Optional(std::nullptr_t) { m_ptr = nullptr; }
+
+ operator RefType*() const { return m_ptr; }
+ RefType const* operator->() const { return m_ptr; }
+ explicit operator bool() const { return !!m_ptr; }
+
+ private:
+ RefType *m_ptr;
+ };
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T>
+ class ArrayProxy
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t)
+ : m_count(0)
+ , m_ptr(nullptr)
+ {}
+
+ ArrayProxy(T & ptr)
+ : m_count(1)
+ , m_ptr(&ptr)
+ {}
+
+ ArrayProxy(uint32_t count, T * ptr)
+ : m_count(count)
+ , m_ptr(ptr)
+ {}
+
+ template <size_t N>
+ ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
+ : m_count(N)
+ , m_ptr(data.data())
+ {}
+
+ template <size_t N>
+ ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
+ : m_count(N)
+ , m_ptr(data.data())
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+ ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
+ : m_count(static_cast<uint32_t>(data.size()))
+ , m_ptr(data.data())
+ {}
+
+ template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+ ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
+ : m_count(static_cast<uint32_t>(data.size()))
+ , m_ptr(data.data())
+ {}
+
+ ArrayProxy(std::initializer_list<T> const& data)
+ : m_count(static_cast<uint32_t>(data.end() - data.begin()))
+ , m_ptr(data.begin())
+ {}
+
+ const T * begin() const
+ {
+ return m_ptr;
+ }
+
+ const T * end() const
+ {
+ return m_ptr + m_count;
+ }
+
+ const T & front() const
+ {
+ VULKAN_HPP_ASSERT(m_count && m_ptr);
+ return *m_ptr;
+ }
+
+ const T & back() const
+ {
+ VULKAN_HPP_ASSERT(m_count && m_ptr);
+ return *(m_ptr + m_count - 1);
+ }
+
+ bool empty() const
+ {
+ return (m_count == 0);
+ }
+
+ uint32_t size() const
+ {
+ return m_count;
+ }
+
+ T * data() const
+ {
+ return m_ptr;
+ }
+
+ private:
+ uint32_t m_count;
+ T * m_ptr;
+ };
+#endif
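
A sketch of the call-site flexibility ArrayProxy provides, assuming enhanced mode (the default) and the SDK headers; sum() is a made-up helper for the example, not part of the API:

    #include <vulkan/vulkan.hpp>
    #include <array>
    #include <iostream>
    #include <vector>

    // One ArrayProxy parameter accepts a single value, a std::array or a std::vector.
    static uint32_t sum( vk::ArrayProxy<const uint32_t> values )
    {
      uint32_t total = 0;
      for ( uint32_t v : values )
      {
        total += v;
      }
      return total;
    }

    int main()
    {
      uint32_t                one = 7;
      std::array<uint32_t, 2> arr = { { 1, 2 } };
      std::vector<uint32_t>   vec = { 3, 4, 5 };

      std::cout << sum( one ) << ' ' << sum( arr ) << ' ' << sum( vec ) << '\n';   // 7 3 12
      return 0;
    }
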
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+
+ template <typename Type> class UniqueHandleTraits;
+
+ template <typename Type>
+ class UniqueHandle : public UniqueHandleTraits<Type>::deleter
+ {
+ private:
+ using Deleter = typename UniqueHandleTraits<Type>::deleter;
+ public:
+ explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() )
+ : Deleter( deleter)
+ , m_value( value )
+ {}
+
+ UniqueHandle( UniqueHandle const& ) = delete;
+
+ UniqueHandle( UniqueHandle && other )
+ : Deleter( std::move( static_cast<Deleter&>( other ) ) )
+ , m_value( other.release() )
+ {}
+
+ ~UniqueHandle()
+ {
+ if ( m_value ) this->destroy( m_value );
+ }
+
+ UniqueHandle & operator=( UniqueHandle const& ) = delete;
+
+ UniqueHandle & operator=( UniqueHandle && other )
+ {
+ reset( other.release() );
+ *static_cast<Deleter*>(this) = std::move( static_cast<Deleter&>(other) );
+ return *this;
+ }
+
+ explicit operator bool() const
+ {
+ return m_value.operator bool();
+ }
+
+ Type const* operator->() const
+ {
+ return &m_value;
+ }
+
+ Type * operator->()
+ {
+ return &m_value;
+ }
+
+ Type const& operator*() const
+ {
+ return m_value;
+ }
+
+ Type & operator*()
+ {
+ return m_value;
+ }
+
+ const Type & get() const
+ {
+ return m_value;
+ }
+
+ Type & get()
+ {
+ return m_value;
+ }
+
+ void reset( Type const& value = Type() )
+ {
+ if ( m_value != value )
+ {
+ if ( m_value ) this->destroy( m_value );
+ m_value = value;
+ }
+ }
+
+ Type release()
+ {
+ Type value = m_value;
+ m_value = nullptr;
+ return value;
+ }
+
+ void swap( UniqueHandle<Type> & rhs )
+ {
+ std::swap(m_value, rhs.m_value);
+ std::swap(static_cast<Deleter&>(*this), static_cast<Deleter&>(rhs));
+ }
+
+ private:
+ Type m_value;
+ };
+
+ template <typename Type>
+ VULKAN_HPP_INLINE void swap( UniqueHandle<Type> & lhs, UniqueHandle<Type> & rhs )
+ {
+ lhs.swap( rhs );
+ }
+#endif
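
As a usage sketch (assuming vk::createInstanceUnique from this header version and a Vulkan loader plus driver at runtime), a UniqueHandle couples a handle with the deleter that destroys it when the object goes out of scope:

    #include <vulkan/vulkan.hpp>

    int main()
    {
      vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_0 );
      vk::InstanceCreateInfo createInfo( {}, &appInfo );

      // createInstanceUnique wraps the new VkInstance in a UniqueHandle; when 'instance'
      // leaves scope, its deleter destroys the instance automatically.
      // Creation failures throw a vk::SystemError (e.g. when no driver is installed).
      vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );

      return instance ? 0 : 1;
    }
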
+
+
+ template <typename X, typename Y> struct isStructureChainValid { enum { value = false }; };
+
+ template <class Element>
+ class StructureChainElement
+ {
+ public:
+ explicit operator Element&() { return value; }
+ explicit operator const Element&() const { return value; }
+ private:
+ Element value;
+ };
+
+ template<typename ...StructureElements>
+ class StructureChain : private StructureChainElement<StructureElements>...
+ {
+ public:
+ StructureChain()
+ {
+ link<StructureElements...>();
+ }
+
+ StructureChain(StructureChain const &rhs)
+ {
+ linkAndCopy<StructureElements...>(rhs);
+ }
+
+ StructureChain& operator=(StructureChain const &rhs)
+ {
+ linkAndCopy<StructureElements...>(rhs);
+ return *this;
+ }
+
+ template<typename ClassType> ClassType& get() { return static_cast<ClassType&>(*this);}
+
+ private:
+ template<typename X>
+ void link()
+ {
+ }
+
+ template<typename X, typename Y, typename ...Z>
+ void link()
+ {
+ static_assert(isStructureChainValid<X,Y>::value, "The structure chain is not valid!");
+ X& x = static_cast<X&>(*this);
+ Y& y = static_cast<Y&>(*this);
+ x.pNext = &y;
+ link<Y, Z...>();
+ }
+
+ template<typename X>
+ void linkAndCopy(StructureChain const &rhs)
+ {
+ static_cast<X&>(*this) = static_cast<X const &>(rhs);
+ }
+
+ template<typename X, typename Y, typename ...Z>
+ void linkAndCopy(StructureChain const &rhs)
+ {
+ static_assert(isStructureChainValid<X,Y>::value, "The structure chain is not valid!");
+ X& x = static_cast<X&>(*this);
+ Y& y = static_cast<Y&>(*this);
+ x = static_cast<X const &>(rhs);
+ x.pNext = &y;
+ linkAndCopy<Y, Z...>(rhs);
+ }
+
+};
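
A sketch of typical StructureChain usage, assuming the SDK headers are available; PhysicalDeviceIDProperties is one of the structures that the isStructureChainValid specializations generated further down in this header allow to extend PhysicalDeviceProperties2:

    #include <vulkan/vulkan.hpp>

    int main()
    {
      // The constructor links the elements' pNext pointers; invalid combinations
      // are rejected at compile time by the static_assert in link().
      vk::StructureChain<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceIDProperties> chain;

      vk::PhysicalDeviceProperties2 & props = chain.get<vk::PhysicalDeviceProperties2>();

      // props.pNext now points at the PhysicalDeviceIDProperties element, so the whole
      // chain can be filled in by a single query such as getProperties2().
      return props.pNext != nullptr ? 0 : 1;
    }
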
+ enum class Result
+ {
+ eSuccess = VK_SUCCESS,
+ eNotReady = VK_NOT_READY,
+ eTimeout = VK_TIMEOUT,
+ eEventSet = VK_EVENT_SET,
+ eEventReset = VK_EVENT_RESET,
+ eIncomplete = VK_INCOMPLETE,
+ eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+ eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+ eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+ eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+ eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+ eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+ eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+ eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+ eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+ eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+ eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+ eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+ eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+ eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY,
+ eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+ eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+ eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+ eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+ eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+ eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+ eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+ eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+ eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+ eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+ eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string(Result value)
+ {
+ switch (value)
+ {
+ case Result::eSuccess: return "Success";
+ case Result::eNotReady: return "NotReady";
+ case Result::eTimeout: return "Timeout";
+ case Result::eEventSet: return "EventSet";
+ case Result::eEventReset: return "EventReset";
+ case Result::eIncomplete: return "Incomplete";
+ case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
+ case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
+ case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
+ case Result::eErrorDeviceLost: return "ErrorDeviceLost";
+ case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
+ case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
+ case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
+ case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
+ case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
+ case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
+ case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
+ case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
+ case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory";
+ case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle";
+ case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
+ case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
+ case Result::eSuboptimalKHR: return "SuboptimalKHR";
+ case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
+ case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
+ case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
+ case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
+ case Result::eErrorFragmentationEXT: return "ErrorFragmentationEXT";
+ case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT";
+ default: return "invalid";
+ }
+ }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# define noexcept _NOEXCEPT
+#endif
+
+ class ErrorCategoryImpl : public std::error_category
+ {
+ public:
+ virtual const char* name() const noexcept override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+ virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
+ };
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# undef noexcept
+#endif
+
+ VULKAN_HPP_INLINE const std::error_category& errorCategory()
+ {
+ static ErrorCategoryImpl instance;
+ return instance;
+ }
+
+ VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
+ {
+ return std::error_code(static_cast<int>(e), errorCategory());
+ }
+
+ VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
+ {
+ return std::error_condition(static_cast<int>(e), errorCategory());
+ }
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# define noexcept _NOEXCEPT
+#endif
+
+ class Error
+ {
+ public:
+ virtual ~Error() = default;
+
+ virtual const char* what() const noexcept = 0;
+ };
+
+ class LogicError : public Error, public std::logic_error
+ {
+ public:
+ explicit LogicError( const std::string& what )
+ : Error(), std::logic_error(what) {}
+ explicit LogicError( char const * what )
+ : Error(), std::logic_error(what) {}
+ virtual ~LogicError() = default;
+
+ virtual const char* what() const noexcept { return std::logic_error::what(); }
+ };
+
+ class SystemError : public Error, public std::system_error
+ {
+ public:
+ SystemError( std::error_code ec )
+ : Error(), std::system_error(ec) {}
+ SystemError( std::error_code ec, std::string const& what )
+ : Error(), std::system_error(ec, what) {}
+ SystemError( std::error_code ec, char const * what )
+ : Error(), std::system_error(ec, what) {}
+ SystemError( int ev, std::error_category const& ecat )
+ : Error(), std::system_error(ev, ecat) {}
+ SystemError( int ev, std::error_category const& ecat, std::string const& what)
+ : Error(), std::system_error(ev, ecat, what) {}
+ SystemError( int ev, std::error_category const& ecat, char const * what)
+ : Error(), std::system_error(ev, ecat, what) {}
+ virtual ~SystemError() = default;
+
+ virtual const char* what() const noexcept { return std::system_error::what(); }
+ };
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# undef noexcept
+#endif
+
+ class OutOfHostMemoryError : public SystemError
+ {
+ public:
+ OutOfHostMemoryError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+ OutOfHostMemoryError( char const * message )
+ : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+ };
+ class OutOfDeviceMemoryError : public SystemError
+ {
+ public:
+ OutOfDeviceMemoryError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+ OutOfDeviceMemoryError( char const * message )
+ : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+ };
+ class InitializationFailedError : public SystemError
+ {
+ public:
+ InitializationFailedError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+ InitializationFailedError( char const * message )
+ : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+ };
+ class DeviceLostError : public SystemError
+ {
+ public:
+ DeviceLostError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+ DeviceLostError( char const * message )
+ : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+ };
+ class MemoryMapFailedError : public SystemError
+ {
+ public:
+ MemoryMapFailedError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+ MemoryMapFailedError( char const * message )
+ : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+ };
+ class LayerNotPresentError : public SystemError
+ {
+ public:
+ LayerNotPresentError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+ LayerNotPresentError( char const * message )
+ : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+ };
+ class ExtensionNotPresentError : public SystemError
+ {
+ public:
+ ExtensionNotPresentError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+ ExtensionNotPresentError( char const * message )
+ : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+ };
+ class FeatureNotPresentError : public SystemError
+ {
+ public:
+ FeatureNotPresentError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+ FeatureNotPresentError( char const * message )
+ : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+ };
+ class IncompatibleDriverError : public SystemError
+ {
+ public:
+ IncompatibleDriverError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+ IncompatibleDriverError( char const * message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+ };
+ class TooManyObjectsError : public SystemError
+ {
+ public:
+ TooManyObjectsError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+ TooManyObjectsError( char const * message )
+ : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+ };
+ class FormatNotSupportedError : public SystemError
+ {
+ public:
+ FormatNotSupportedError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+ FormatNotSupportedError( char const * message )
+ : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+ };
+ class FragmentedPoolError : public SystemError
+ {
+ public:
+ FragmentedPoolError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+ FragmentedPoolError( char const * message )
+ : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+ };
+ class OutOfPoolMemoryError : public SystemError
+ {
+ public:
+ OutOfPoolMemoryError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+ OutOfPoolMemoryError( char const * message )
+ : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+ };
+ class InvalidExternalHandleError : public SystemError
+ {
+ public:
+ InvalidExternalHandleError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+ InvalidExternalHandleError( char const * message )
+ : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+ };
+ class SurfaceLostKHRError : public SystemError
+ {
+ public:
+ SurfaceLostKHRError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+ SurfaceLostKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+ };
+ class NativeWindowInUseKHRError : public SystemError
+ {
+ public:
+ NativeWindowInUseKHRError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+ NativeWindowInUseKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+ };
+ class OutOfDateKHRError : public SystemError
+ {
+ public:
+ OutOfDateKHRError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+ OutOfDateKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+ };
+ class IncompatibleDisplayKHRError : public SystemError
+ {
+ public:
+ IncompatibleDisplayKHRError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+ IncompatibleDisplayKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+ };
+ class ValidationFailedEXTError : public SystemError
+ {
+ public:
+ ValidationFailedEXTError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+ ValidationFailedEXTError( char const * message )
+ : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+ };
+ class InvalidShaderNVError : public SystemError
+ {
+ public:
+ InvalidShaderNVError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+ InvalidShaderNVError( char const * message )
+ : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+ };
+ class FragmentationEXTError : public SystemError
+ {
+ public:
+ FragmentationEXTError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
+ FragmentationEXTError( char const * message )
+ : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
+ };
+ class NotPermittedEXTError : public SystemError
+ {
+ public:
+ NotPermittedEXTError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
+ NotPermittedEXTError( char const * message )
+ : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
+ };
+
+ VULKAN_HPP_INLINE void throwResultException( Result result, char const * message )
+ {
+ switch ( result )
+ {
+ case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError ( message );
+ case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError ( message );
+ case Result::eErrorInitializationFailed: throw InitializationFailedError ( message );
+ case Result::eErrorDeviceLost: throw DeviceLostError ( message );
+ case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError ( message );
+ case Result::eErrorLayerNotPresent: throw LayerNotPresentError ( message );
+ case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError ( message );
+ case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError ( message );
+ case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError ( message );
+ case Result::eErrorTooManyObjects: throw TooManyObjectsError ( message );
+ case Result::eErrorFormatNotSupported: throw FormatNotSupportedError ( message );
+ case Result::eErrorFragmentedPool: throw FragmentedPoolError ( message );
+ case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError ( message );
+ case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError ( message );
+ case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError ( message );
+ case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError ( message );
+ case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError ( message );
+ case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError ( message );
+ case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError ( message );
+ case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError ( message );
+ case Result::eErrorFragmentationEXT: throw FragmentationEXTError ( message );
+ case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError ( message );
+ default: throw SystemError( make_error_code( result ) );
+ }
+ }
+#endif
+} // namespace VULKAN_HPP_NAMESPACE
+
+namespace std
+{
+ template <>
+ struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+ {};
+}
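
Because of this specialization, a Result plugs directly into the <system_error> machinery; a small sketch, assuming the default vk namespace:

    #include <vulkan/vulkan.hpp>
    #include <iostream>
    #include <system_error>

    int main()
    {
      // is_error_code_enum<vk::Result> makes this conversion implicit;
      // make_error_code() is found via argument-dependent lookup.
      std::error_code ec = vk::Result::eErrorOutOfDateKHR;

      std::cout << ec.category().name() << ": " << ec.message() << '\n';   // vk::Result: ErrorOutOfDateKHR
      return ec ? 0 : 1;
    }
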
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+ template <typename T>
+ struct ResultValue
+ {
+ ResultValue( Result r, T & v )
+ : result( r )
+ , value( v )
+ {}
+
+ ResultValue( Result r, T && v )
+ : result( r )
+ , value( std::move( v ) )
+ {}
+
+ Result result;
+ T value;
+
+ operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
+ };
+
+ template <typename T>
+ struct ResultValueType
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ typedef ResultValue<T> type;
+#else
+ typedef T type;
+#endif
+ };
+
+ template <>
+ struct ResultValueType<void>
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ typedef Result type;
+#else
+ typedef void type;
+#endif
+ };
+
+ VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ return result;
+#else
+ if ( result != Result::eSuccess )
+ {
+ throwResultException( result, message );
+ }
+#endif
+ }
+
+ template <typename T>
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ return ResultValue<T>( result, data );
+#else
+ if ( result != Result::eSuccess )
+ {
+ throwResultException( result, message );
+ }
+ return std::move( data );
+#endif
+ }
+
+ VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+ if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+ {
+ throwResultException( result, message );
+ }
+#endif
+ return result;
+ }
+
+ template <typename T>
+ VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+ if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+ {
+ throwResultException( result, message );
+ }
+#endif
+ return ResultValue<T>( result, data );
+ }
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename T>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T>::deleter const& deleter )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( result == Result::eSuccess );
+ return ResultValue<UniqueHandle<T>>( result, UniqueHandle<T>(data, deleter) );
+#else
+ if ( result != Result::eSuccess )
+ {
+ throwResultException( result, message );
+ }
+ return UniqueHandle<T>(data, deleter);
+#endif
+ }
+#endif
+
+
+ struct AllocationCallbacks;
+
+ template <typename OwnerType>
+ class ObjectDestroy
+ {
+ public:
+ ObjectDestroy(OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocator = nullptr)
+ : m_owner(owner)
+ , m_allocator(allocator)
+ {}
+
+ OwnerType getOwner() const { return m_owner; }
+ Optional<const AllocationCallbacks> getAllocator() const { return m_allocator; }
+
+ protected:
+ template <typename T>
+ void destroy(T t)
+ {
+ m_owner.destroy(t, m_allocator);
+ }
+
+ private:
+ OwnerType m_owner;
+ Optional<const AllocationCallbacks> m_allocator;
+ };
+
+ class NoParent;
+
+ template <>
+ class ObjectDestroy<NoParent>
+ {
+ public:
+ ObjectDestroy( Optional<const AllocationCallbacks> allocator = nullptr )
+ : m_allocator( allocator )
+ {}
+
+ Optional<const AllocationCallbacks> getAllocator() const { return m_allocator; }
+
+ protected:
+ template <typename T>
+ void destroy(T t)
+ {
+ t.destroy( m_allocator );
+ }
+
+ private:
+ Optional<const AllocationCallbacks> m_allocator;
+ };
+
+ template <typename OwnerType>
+ class ObjectFree
+ {
+ public:
+ ObjectFree(OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocator = nullptr)
+ : m_owner(owner)
+ , m_allocator(allocator)
+ {}
+
+ OwnerType getOwner() const { return m_owner; }
+ Optional<const AllocationCallbacks> getAllocator() const { return m_allocator; }
+
+ protected:
+ template <typename T>
+ void destroy(T t)
+ {
+ m_owner.free(t, m_allocator);
+ }
+
+ private:
+ OwnerType m_owner;
+ Optional<const AllocationCallbacks> m_allocator;
+ };
+
+ template <typename OwnerType, typename PoolType>
+ class PoolFree
+ {
+ public:
+ PoolFree(OwnerType owner = OwnerType(), PoolType pool = PoolType())
+ : m_owner(owner)
+ , m_pool(pool)
+ {}
+
+ OwnerType getOwner() const { return m_owner; }
+ PoolType getPool() const { return m_pool; }
+
+ protected:
+ template <typename T>
+ void destroy(T t)
+ {
+ m_owner.free(m_pool, t);
+ }
+
+ private:
+ OwnerType m_owner;
+ PoolType m_pool;
+ };
+
+class DispatchLoaderStatic
+{
+public:
+ VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const
+ {
+ return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex);
+ }
+ VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const
+ {
+ return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex);
+ }
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+ VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const
+ {
+ return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display);
+ }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+ VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const
+ {
+ return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers);
+ }
+ VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const
+ {
+ return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets);
+ }
+ VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const
+ {
+ return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory);
+ }
+ VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const
+ {
+ return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo);
+ }
+ VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const
+ {
+ return ::vkBindBufferMemory( device, buffer, memory, memoryOffset);
+ }
+ VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const
+ {
+ return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos);
+ }
+ VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const
+ {
+ return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos);
+ }
+ VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const
+ {
+ return ::vkBindImageMemory( device, image, memory, memoryOffset);
+ }
+ VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const
+ {
+ return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos);
+ }
+ VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const
+ {
+ return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos);
+ }
+ void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const
+ {
+ return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo);
+ }
+ void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const
+ {
+ return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags);
+ }
+ void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const
+ {
+ return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents);
+ }
+ void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
+ {
+ return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+ }
+ void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const
+ {
+ return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType);
+ }
+ void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const
+ {
+ return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline);
+ }
+ void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const
+ {
+ return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+ }
+ void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const
+ {
+ return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+ }
+ void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const
+ {
+ return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+ }
+ void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const
+ {
+ return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ }
+ void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const
+ {
+ return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+ }
+ void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const
+ {
+ return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+ }
+ void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const
+ {
+ return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+ }
+ void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const
+ {
+ return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ }
+ void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const
+ {
+ return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+ }
+ void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const
+ {
+ return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+ }
+ void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+ {
+ return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo);
+ }
+ void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const
+ {
+ return ::vkCmdDebugMarkerEndEXT( commandBuffer);
+ }
+ void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+ {
+ return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo);
+ }
+ void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+ {
+ return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ);
+ }
+ void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+ {
+ return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+ }
+ void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+ {
+ return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+ }
+ void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const
+ {
+ return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset);
+ }
+ void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+ {
+ return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+ }
+ void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+ {
+ return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+ }
+ void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ {
+ return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride);
+ }
+ void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ {
+ return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+ }
+ void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ {
+ return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride);
+ }
+ void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+ {
+ return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+ }
+ void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const
+ {
+ return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer);
+ }
+ void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const
+ {
+ return ::vkCmdEndQuery( commandBuffer, queryPool, query);
+ }
+ void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const
+ {
+ return ::vkCmdEndRenderPass( commandBuffer);
+ }
+ void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const
+ {
+ return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers);
+ }
+ void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const
+ {
+ return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data);
+ }
+ void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const
+ {
+ return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo);
+ }
+ void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const
+ {
+ return ::vkCmdNextSubpass( commandBuffer, contents);
+ }
+ void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const
+ {
+ return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ }
+ void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
+ {
+ return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo);
+ }
+ void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
+ {
+ return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues);
+ }
+ void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const
+ {
+ return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+ }
+ void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const
+ {
+ return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+ }
+ void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
+ {
+ return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo);
+ }
+ void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const
+ {
+ return ::vkCmdResetEvent( commandBuffer, event, stageMask);
+ }
+ void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+ {
+ return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount);
+ }
+ void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const
+ {
+ return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ }
+ void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const
+ {
+ return ::vkCmdSetBlendConstants( commandBuffer, blendConstants);
+ }
+ void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+ {
+ return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+ }
+ void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const
+ {
+ return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds);
+ }
+ void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const
+ {
+ return ::vkCmdSetDeviceMask( commandBuffer, deviceMask);
+ }
+ void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const
+ {
+ return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask);
+ }
+ void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const
+ {
+ return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+ }
+ void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const
+ {
+ return ::vkCmdSetEvent( commandBuffer, event, stageMask);
+ }
+ void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const
+ {
+ return ::vkCmdSetLineWidth( commandBuffer, lineWidth);
+ }
+ void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const
+ {
+ return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo);
+ }
+ void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const
+ {
+ return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors);
+ }
+ void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const
+ {
+ return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask);
+ }
+ void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const
+ {
+ return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference);
+ }
+ void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const
+ {
+ return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask);
+ }
+ void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const
+ {
+ return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports);
+ }
+ void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const
+ {
+ return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+ }
+ void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const
+ {
+ return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ }
+ void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const
+ {
+ return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ }
+ void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const
+ {
+ return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+ }
+ void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const
+ {
+ return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query);
+ }
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const
+ {
+ return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer);
+ }
+ VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const
+ {
+ return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView);
+ }
+ VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const
+ {
+ return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool);
+ }
+ VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const
+ {
+ return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ }
+ VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const
+ {
+ return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback);
+ }
+ VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const
+ {
+ return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger);
+ }
+ VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const
+ {
+ return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool);
+ }
+ VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const
+ {
+ return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout);
+ }
+ VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const
+ {
+ return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+ }
+ VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const
+ {
+ return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+ }
+ VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const
+ {
+ return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice);
+ }
+ VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const
+ {
+ return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode);
+ }
+ VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+ VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const
+ {
+ return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent);
+ }
+ VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const
+ {
+ return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence);
+ }
+ VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const
+ {
+ return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer);
+ }
+ VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const
+ {
+ return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ }
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+ VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const
+ {
+ return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage);
+ }
+ VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const
+ {
+ return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView);
+ }
+ VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
+ {
+ return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+ }
+ VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const
+ {
+ return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance);
+ }
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ VkResult vkCreateMirSurfaceKHR( VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateMirSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const
+ {
+ return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable);
+ }
+ VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const
+ {
+ return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache);
+ }
+ VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const
+ {
+ return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout);
+ }
+ VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const
+ {
+ return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool);
+ }
+ VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const
+ {
+ return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass);
+ }
+ VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const
+ {
+ return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler);
+ }
+ VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const
+ {
+ return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion);
+ }
+ VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const
+ {
+ return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion);
+ }
+ VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const
+ {
+ return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore);
+ }
+ VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const
+ {
+ return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule);
+ }
+ VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const
+ {
+ return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+ }
+ VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const
+ {
+ return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain);
+ }
+ VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const
+ {
+ return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache);
+ }
+#ifdef VK_USE_PLATFORM_VI_NN
+ VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const
+ {
+ return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
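+    // The vkCreate*SurfaceKHR wrappers above are compiled only when the matching
+    // VK_USE_PLATFORM_* macro is defined before including this header, mirroring
+    // the include guards in vulkan.h. A minimal sketch, assuming an XCB build and
+    // purely illustrative handles (instance, connection, window); each wrapper
+    // forwards to exactly the global call shown:
+    //
+    //   #define VK_USE_PLATFORM_XCB_KHR
+    //   #include <vulkan/vulkan.hpp>
+    //
+    //   VkXcbSurfaceCreateInfoKHR createInfo = {};
+    //   createInfo.sType      = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+    //   createInfo.connection = connection;   // xcb_connection_t*, assumed
+    //   createInfo.window     = window;       // xcb_window_t, assumed
+    //   VkSurfaceKHR surface  = VK_NULL_HANDLE;
+    //   VkResult result = ::vkCreateXcbSurfaceKHR( instance, &createInfo, nullptr, &surface );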
+ VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const
+ {
+ return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo);
+ }
+ VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const
+ {
+ return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo);
+ }
+ void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const
+ {
+ return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+ }
+ void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyBuffer( device, buffer, pAllocator);
+ }
+ void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyBufferView( device, bufferView, pAllocator);
+ }
+ void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyCommandPool( device, commandPool, pAllocator);
+ }
+ void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator);
+ }
+ void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator);
+ }
+ void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator);
+ }
+ void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator);
+ }
+ void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator);
+ }
+ void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator);
+ }
+ void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyDevice( device, pAllocator);
+ }
+ void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyEvent( device, event, pAllocator);
+ }
+ void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyFence( device, fence, pAllocator);
+ }
+ void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyFramebuffer( device, framebuffer, pAllocator);
+ }
+ void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyImage( device, image, pAllocator);
+ }
+ void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyImageView( device, imageView, pAllocator);
+ }
+ void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator);
+ }
+ void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyInstance( instance, pAllocator);
+ }
+ void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator);
+ }
+ void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyPipeline( device, pipeline, pAllocator);
+ }
+ void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator);
+ }
+ void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator);
+ }
+ void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyQueryPool( device, queryPool, pAllocator);
+ }
+ void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyRenderPass( device, renderPass, pAllocator);
+ }
+ void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySampler( device, sampler, pAllocator);
+ }
+ void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator);
+ }
+ void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator);
+ }
+ void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySemaphore( device, semaphore, pAllocator);
+ }
+ void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyShaderModule( device, shaderModule, pAllocator);
+ }
+ void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySurfaceKHR( instance, surface, pAllocator);
+ }
+ void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroySwapchainKHR( device, swapchain, pAllocator);
+ }
+ void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator);
+ }
+ VkResult vkDeviceWaitIdle( VkDevice device ) const
+ {
+ return ::vkDeviceWaitIdle( device);
+ }
+ VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const
+ {
+ return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo);
+ }
+ VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const
+ {
+ return ::vkEndCommandBuffer( commandBuffer);
+ }
+ VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const
+ {
+ return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties);
+ }
+ VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const
+ {
+ return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties);
+ }
+ VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const
+ {
+ return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties);
+ }
+ VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const
+ {
+ return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties);
+ }
+ VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const
+ {
+ return ::vkEnumerateInstanceVersion( pApiVersion);
+ }
+ VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const
+ {
+ return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+ }
+ VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const
+ {
+ return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+ }
+ VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const
+ {
+ return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices);
+ }
+ VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const
+ {
+ return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges);
+ }
+ void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const
+ {
+ return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers);
+ }
+ VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const
+ {
+ return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets);
+ }
+ void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const
+ {
+ return ::vkFreeMemory( device, memory, pAllocator);
+ }
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const
+ {
+ return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties);
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const
+ {
+ return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements);
+ }
+ void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const
+ {
+ return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements);
+ }
+ void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const
+ {
+ return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements);
+ }
+ void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const
+ {
+ return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport);
+ }
+ void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const
+ {
+ return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport);
+ }
+ void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const
+ {
+ return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+ }
+ void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const
+ {
+ return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+ }
+ VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const
+ {
+ return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities);
+ }
+ VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const
+ {
+ return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes);
+ }
+ void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const
+ {
+ return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes);
+ }
+ PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const
+ {
+ return ::vkGetDeviceProcAddr( device, pName);
+ }
+ void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const
+ {
+ return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue);
+ }
+ void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const
+ {
+ return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue);
+ }
+ VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const
+ {
+ return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties);
+ }
+ VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const
+ {
+ return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities);
+ }
+ VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const
+ {
+ return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays);
+ }
+ VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const
+ {
+ return ::vkGetEventStatus( device, event);
+ }
+ VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+ {
+ return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd);
+ }
+ VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const
+ {
+ return ::vkGetFenceStatus( device, fence);
+ }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+ {
+ return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const
+ {
+ return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements);
+ }
+ void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const
+ {
+ return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements);
+ }
+ void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const
+ {
+ return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements);
+ }
+ void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const
+ {
+ return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+ }
+ void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const
+ {
+ return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+ }
+ void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const
+ {
+ return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+ }
+ void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const
+ {
+ return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout);
+ }
+ PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const
+ {
+ return ::vkGetInstanceProcAddr( instance, pName);
+ }
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const
+ {
+ return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer);
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+ {
+ return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd);
+ }
+ VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const
+ {
+ return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties);
+ }
+ VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const
+ {
+ return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties);
+ }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+ {
+ return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const
+ {
+ return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const
+ {
+ return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const
+ {
+ return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings);
+ }
+ VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties);
+ }
+ VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties);
+ }
+ void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+ }
+ void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+ }
+ void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+ }
+ void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+ }
+ VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
+ }
+ void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+ }
+ void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const
+ {
+ return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+ }
+ void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const
+ {
+ return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures);
+ }
+ void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const
+ {
+ return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures);
+ }
+ void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const
+ {
+ return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures);
+ }
+ void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties);
+ }
+ void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties);
+ }
+ void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties);
+ }
+ void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const
+ {
+ return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits);
+ }
+ VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+ }
+ VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties);
+ }
+ VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const
+ {
+ return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties);
+ }
+ void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const
+ {
+ return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties);
+ }
+ void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const
+ {
+ return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties);
+ }
+ void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const
+ {
+ return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties);
+ }
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ VkBool32 vkGetPhysicalDeviceMirPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection ) const
+ {
+ return ::vkGetPhysicalDeviceMirPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection);
+ }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const
+ {
+ return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties);
+ }
+ VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const
+ {
+ return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects);
+ }
+ void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties);
+ }
+ void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties);
+ }
+ void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties);
+ }
+ void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const
+ {
+ return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+ }
+ void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const
+ {
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+ }
+ void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const
+ {
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+ }
+ void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+ }
+ void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+ }
+ void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const
+ {
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+ }
+ VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const
+ {
+ return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes);
+ }
+ VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const
+ {
+ return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported);
+ }
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const
+ {
+ return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display);
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const
+ {
+ return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const
+ {
+ return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id);
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const
+ {
+ return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID);
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const
+ {
+ return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData);
+ }
+ VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const
+ {
+ return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+ }
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const
+ {
+ return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay);
+ }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const
+ {
+ return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties);
+ }
+ void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const
+ {
+ return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity);
+ }
+ VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+ {
+ return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd);
+ }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+ {
+ return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const
+ {
+ return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+ }
+ VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const
+ {
+ return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue);
+ }
+ VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const
+ {
+ return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages);
+ }
+ VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const
+ {
+ return ::vkGetSwapchainStatusKHR( device, swapchain);
+ }
+ VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const
+ {
+ return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData);
+ }
+ VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const
+ {
+ return ::vkImportFenceFdKHR( device, pImportFenceFdInfo);
+ }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const
+ {
+ return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const
+ {
+ return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo);
+ }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const
+ {
+ return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo);
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const
+ {
+ return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges);
+ }
+ VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const
+ {
+ return ::vkMapMemory( device, memory, offset, size, flags, ppData);
+ }
+ VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const
+ {
+ return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches);
+ }
+ VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const
+ {
+ return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches);
+ }
+ void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const
+ {
+ return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo);
+ }
+ VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const
+ {
+ return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence);
+ }
+ void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const
+ {
+ return ::vkQueueEndDebugUtilsLabelEXT( queue);
+ }
+ void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const
+ {
+ return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo);
+ }
+ VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const
+ {
+ return ::vkQueuePresentKHR( queue, pPresentInfo);
+ }
+ VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const
+ {
+ return ::vkQueueSubmit( queue, submitCount, pSubmits, fence);
+ }
+ VkResult vkQueueWaitIdle( VkQueue queue ) const
+ {
+ return ::vkQueueWaitIdle( queue);
+ }
+ VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const
+ {
+ return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence);
+ }
+ VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const
+ {
+ return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence);
+ }
+ VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
+ {
+ return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+ }
+ VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const
+ {
+ return ::vkReleaseDisplayEXT( physicalDevice, display);
+ }
+ VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const
+ {
+ return ::vkResetCommandBuffer( commandBuffer, flags);
+ }
+ VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const
+ {
+ return ::vkResetCommandPool( device, commandPool, flags);
+ }
+ VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const
+ {
+ return ::vkResetDescriptorPool( device, descriptorPool, flags);
+ }
+ VkResult vkResetEvent( VkDevice device, VkEvent event ) const
+ {
+ return ::vkResetEvent( device, event);
+ }
+ VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const
+ {
+ return ::vkResetFences( device, fenceCount, pFences);
+ }
+ VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const
+ {
+ return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo);
+ }
+ VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const
+ {
+ return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo);
+ }
+ VkResult vkSetEvent( VkDevice device, VkEvent event ) const
+ {
+ return ::vkSetEvent( device, event);
+ }
+ void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const
+ {
+ return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata);
+ }
+ void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const
+ {
+ return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData);
+ }
+ void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const
+ {
+ return ::vkTrimCommandPool( device, commandPool, flags);
+ }
+ void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const
+ {
+ return ::vkTrimCommandPoolKHR( device, commandPool, flags);
+ }
+ void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const
+ {
+ return ::vkUnmapMemory( device, memory);
+ }
+ VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
+ {
+ return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+ }
+ void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const
+ {
+ return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData);
+ }
+ void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const
+ {
+ return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData);
+ }
+ void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const
+ {
+ return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+ }
+ VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const
+ {
+ return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout);
+ }
+};
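+
+  // Every member of the dispatcher class above forwards unchanged to the
+  // identically named global Vulkan entry point exported by the loader, so the
+  // class carries no state. A minimal usage sketch, assuming the enclosing class
+  // is the header's statically dispatching loader (DispatchLoaderStatic upstream)
+  // and with purely illustrative variable names:
+  //
+  //   DispatchLoaderStatic dispatch;                     // stateless, cheap to copy
+  //   VkInstanceCreateInfo createInfo = {};
+  //   createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+  //   VkInstance instance = VK_NULL_HANDLE;
+  //   VkResult result = dispatch.vkCreateInstance( &createInfo, nullptr, &instance );
+  //
+  // A dispatcher populated via vkGetInstanceProcAddr/vkGetDeviceProcAddr could
+  // expose the same member signatures and be substituted without touching call sites.
+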
+ using SampleMask = uint32_t;
+
+ using Bool32 = uint32_t;
+
+ using DeviceSize = uint64_t;
+
+ enum class FramebufferCreateFlagBits
+ {
+ };
+
+ using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+
+ enum class QueryPoolCreateFlagBits
+ {
+ };
+
+ using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
+
+ enum class RenderPassCreateFlagBits
+ {
+ };
+
+ using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
+
+ enum class SamplerCreateFlagBits
+ {
+ };
+
+ using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
+
+ enum class PipelineLayoutCreateFlagBits
+ {
+ };
+
+ using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
+
+ enum class PipelineCacheCreateFlagBits
+ {
+ };
+
+ using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
+
+ enum class PipelineDepthStencilStateCreateFlagBits
+ {
+ };
+
+ using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
+
+ enum class PipelineDynamicStateCreateFlagBits
+ {
+ };
+
+ using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
+
+ enum class PipelineColorBlendStateCreateFlagBits
+ {
+ };
+
+ using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+
+ enum class PipelineMultisampleStateCreateFlagBits
+ {
+ };
+
+ using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
+
+ enum class PipelineRasterizationStateCreateFlagBits
+ {
+ };
+
+ using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
+
+ enum class PipelineViewportStateCreateFlagBits
+ {
+ };
+
+ using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
+
+ enum class PipelineTessellationStateCreateFlagBits
+ {
+ };
+
+ using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
+
+ enum class PipelineInputAssemblyStateCreateFlagBits
+ {
+ };
+
+ using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
+
+ enum class PipelineVertexInputStateCreateFlagBits
+ {
+ };
+
+ using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
+
+ enum class PipelineShaderStageCreateFlagBits
+ {
+ };
+
+ using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
+
+ enum class BufferViewCreateFlagBits
+ {
+ };
+
+ using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
+
+ enum class InstanceCreateFlagBits
+ {
+ };
+
+ using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
+
+ enum class DeviceCreateFlagBits
+ {
+ };
+
+ using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+ enum class ImageViewCreateFlagBits
+ {
+ };
+
+ using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
+
+ enum class SemaphoreCreateFlagBits
+ {
+ };
+
+ using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
+
+ enum class ShaderModuleCreateFlagBits
+ {
+ };
+
+ using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
+
+ enum class EventCreateFlagBits
+ {
+ };
+
+ using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+
+ enum class MemoryMapFlagBits
+ {
+ };
+
+ using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
+
+ enum class DescriptorPoolResetFlagBits
+ {
+ };
+
+ using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+
+ enum class DescriptorUpdateTemplateCreateFlagBits
+ {
+ };
+
+ using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
+
+ using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+ enum class DisplayModeCreateFlagBitsKHR
+ {
+ };
+
+ using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+
+ enum class DisplaySurfaceCreateFlagBitsKHR
+ {
+ };
+
+ using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ enum class AndroidSurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ enum class MirSurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ enum class ViSurfaceCreateFlagBitsNN
+ {
+ };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ enum class WaylandSurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ enum class Win32SurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ enum class XlibSurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ enum class XcbSurfaceCreateFlagBitsKHR
+ {
+ };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ enum class IOSSurfaceCreateFlagBitsMVK
+ {
+ };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ enum class MacOSSurfaceCreateFlagBitsMVK
+ {
+ };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ enum class CommandPoolTrimFlagBits
+ {
+ };
+
+ using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
+
+ using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+ enum class PipelineViewportSwizzleStateCreateFlagBitsNV
+ {
+ };
+
+ using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
+
+ enum class PipelineDiscardRectangleStateCreateFlagBitsEXT
+ {
+ };
+
+ using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
+
+ enum class PipelineCoverageToColorStateCreateFlagBitsNV
+ {
+ };
+
+ using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
+
+ enum class PipelineCoverageModulationStateCreateFlagBitsNV
+ {
+ };
+
+ using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
+
+ enum class ValidationCacheCreateFlagBitsEXT
+ {
+ };
+
+ using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT, VkValidationCacheCreateFlagsEXT>;
+
+ enum class DebugUtilsMessengerCreateFlagBitsEXT
+ {
+ };
+
+ using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
+
+ enum class DebugUtilsMessengerCallbackDataFlagBitsEXT
+ {
+ };
+
+ using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
+
+ enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT
+ {
+ };
+
+ using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT, VkPipelineRasterizationConservativeStateCreateFlagsEXT>;
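// Editor's note: illustrative sketch, not part of the generated header. Each of
// the empty *FlagBits enums above exists only so the Flags<> template yields a
// distinct, type-safe alias for a reserved Vk*Flags type; a default-constructed
// value carries no bits. The helper name is hypothetical, and the cast assumes
// Flags<> exposes a (possibly explicit) conversion to its underlying mask type.
inline VkFramebufferCreateFlags emptyFramebufferFlagsExample()
{
  FramebufferCreateFlags flags;                            // no flag bits defined yet
  return static_cast<VkFramebufferCreateFlags>( flags );   // always 0 today
}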
+
+ class DeviceMemory
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DeviceMemory()
+ : m_deviceMemory(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t )
+ : m_deviceMemory(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory )
+ : m_deviceMemory( deviceMemory )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DeviceMemory & operator=(VkDeviceMemory deviceMemory)
+ {
+ m_deviceMemory = deviceMemory;
+ return *this;
+ }
+#endif
+
+ DeviceMemory & operator=( std::nullptr_t )
+ {
+ m_deviceMemory = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DeviceMemory const & rhs ) const
+ {
+ return m_deviceMemory == rhs.m_deviceMemory;
+ }
+
+ bool operator!=(DeviceMemory const & rhs ) const
+ {
+ return m_deviceMemory != rhs.m_deviceMemory;
+ }
+
+ bool operator<(DeviceMemory const & rhs ) const
+ {
+ return m_deviceMemory < rhs.m_deviceMemory;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const
+ {
+ return m_deviceMemory;
+ }
+
+ explicit operator bool() const
+ {
+ return m_deviceMemory != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_deviceMemory == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDeviceMemory m_deviceMemory;
+ };
+
+ static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
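// Editor's note: illustrative sketch, not part of the generated header. Every
// handle wrapper in this block follows the same pattern: a value type the exact
// size of the C handle, nullable, comparable, boolean-testable, and explicitly
// convertible back to the raw handle. The helper name is hypothetical.
inline bool isSameAllocationExample( VkDeviceMemory raw )
{
  DeviceMemory mem( raw );                            // wrap the C handle
  if ( !mem )                                         // true iff raw == VK_NULL_HANDLE
  {
    return false;
  }
  return static_cast<VkDeviceMemory>( mem ) == raw;   // explicit unwrap round-trips
}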
+
+ class CommandPool
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR CommandPool()
+ : m_commandPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t )
+ : m_commandPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool )
+ : m_commandPool( commandPool )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandPool & operator=(VkCommandPool commandPool)
+ {
+ m_commandPool = commandPool;
+ return *this;
+ }
+#endif
+
+ CommandPool & operator=( std::nullptr_t )
+ {
+ m_commandPool = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( CommandPool const & rhs ) const
+ {
+ return m_commandPool == rhs.m_commandPool;
+ }
+
+ bool operator!=(CommandPool const & rhs ) const
+ {
+ return m_commandPool != rhs.m_commandPool;
+ }
+
+ bool operator<(CommandPool const & rhs ) const
+ {
+ return m_commandPool < rhs.m_commandPool;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const
+ {
+ return m_commandPool;
+ }
+
+ explicit operator bool() const
+ {
+ return m_commandPool != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_commandPool == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCommandPool m_commandPool;
+ };
+
+ static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+
+ class Buffer
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Buffer()
+ : m_buffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t )
+ : m_buffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer )
+ : m_buffer( buffer )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Buffer & operator=(VkBuffer buffer)
+ {
+ m_buffer = buffer;
+ return *this;
+ }
+#endif
+
+ Buffer & operator=( std::nullptr_t )
+ {
+ m_buffer = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Buffer const & rhs ) const
+ {
+ return m_buffer == rhs.m_buffer;
+ }
+
+ bool operator!=(Buffer const & rhs ) const
+ {
+ return m_buffer != rhs.m_buffer;
+ }
+
+ bool operator<(Buffer const & rhs ) const
+ {
+ return m_buffer < rhs.m_buffer;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const
+ {
+ return m_buffer;
+ }
+
+ explicit operator bool() const
+ {
+ return m_buffer != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_buffer == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkBuffer m_buffer;
+ };
+
+ static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+
+ class BufferView
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR BufferView()
+ : m_bufferView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t )
+ : m_bufferView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView )
+ : m_bufferView( bufferView )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ BufferView & operator=(VkBufferView bufferView)
+ {
+ m_bufferView = bufferView;
+ return *this;
+ }
+#endif
+
+ BufferView & operator=( std::nullptr_t )
+ {
+ m_bufferView = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( BufferView const & rhs ) const
+ {
+ return m_bufferView == rhs.m_bufferView;
+ }
+
+ bool operator!=(BufferView const & rhs ) const
+ {
+ return m_bufferView != rhs.m_bufferView;
+ }
+
+ bool operator<(BufferView const & rhs ) const
+ {
+ return m_bufferView < rhs.m_bufferView;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const
+ {
+ return m_bufferView;
+ }
+
+ explicit operator bool() const
+ {
+ return m_bufferView != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_bufferView == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkBufferView m_bufferView;
+ };
+
+ static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+ class Image
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Image()
+ : m_image(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Image( std::nullptr_t )
+ : m_image(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image )
+ : m_image( image )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Image & operator=(VkImage image)
+ {
+ m_image = image;
+ return *this;
+ }
+#endif
+
+ Image & operator=( std::nullptr_t )
+ {
+ m_image = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Image const & rhs ) const
+ {
+ return m_image == rhs.m_image;
+ }
+
+ bool operator!=(Image const & rhs ) const
+ {
+ return m_image != rhs.m_image;
+ }
+
+ bool operator<(Image const & rhs ) const
+ {
+ return m_image < rhs.m_image;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const
+ {
+ return m_image;
+ }
+
+ explicit operator bool() const
+ {
+ return m_image != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_image == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkImage m_image;
+ };
+
+ static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+
+ class ImageView
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ImageView()
+ : m_imageView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t )
+ : m_imageView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView )
+ : m_imageView( imageView )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ImageView & operator=(VkImageView imageView)
+ {
+ m_imageView = imageView;
+ return *this;
+ }
+#endif
+
+ ImageView & operator=( std::nullptr_t )
+ {
+ m_imageView = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( ImageView const & rhs ) const
+ {
+ return m_imageView == rhs.m_imageView;
+ }
+
+ bool operator!=(ImageView const & rhs ) const
+ {
+ return m_imageView != rhs.m_imageView;
+ }
+
+ bool operator<(ImageView const & rhs ) const
+ {
+ return m_imageView < rhs.m_imageView;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const
+ {
+ return m_imageView;
+ }
+
+ explicit operator bool() const
+ {
+ return m_imageView != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_imageView == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkImageView m_imageView;
+ };
+
+ static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+
+ class ShaderModule
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ShaderModule()
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t )
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule )
+ : m_shaderModule( shaderModule )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ShaderModule & operator=(VkShaderModule shaderModule)
+ {
+ m_shaderModule = shaderModule;
+ return *this;
+ }
+#endif
+
+ ShaderModule & operator=( std::nullptr_t )
+ {
+ m_shaderModule = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( ShaderModule const & rhs ) const
+ {
+ return m_shaderModule == rhs.m_shaderModule;
+ }
+
+ bool operator!=(ShaderModule const & rhs ) const
+ {
+ return m_shaderModule != rhs.m_shaderModule;
+ }
+
+ bool operator<(ShaderModule const & rhs ) const
+ {
+ return m_shaderModule < rhs.m_shaderModule;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const
+ {
+ return m_shaderModule;
+ }
+
+ explicit operator bool() const
+ {
+ return m_shaderModule != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_shaderModule == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkShaderModule m_shaderModule;
+ };
+
+ static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+ class Pipeline
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Pipeline()
+ : m_pipeline(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t )
+ : m_pipeline(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline )
+ : m_pipeline( pipeline )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Pipeline & operator=(VkPipeline pipeline)
+ {
+ m_pipeline = pipeline;
+ return *this;
+ }
+#endif
+
+ Pipeline & operator=( std::nullptr_t )
+ {
+ m_pipeline = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Pipeline const & rhs ) const
+ {
+ return m_pipeline == rhs.m_pipeline;
+ }
+
+ bool operator!=(Pipeline const & rhs ) const
+ {
+ return m_pipeline != rhs.m_pipeline;
+ }
+
+ bool operator<(Pipeline const & rhs ) const
+ {
+ return m_pipeline < rhs.m_pipeline;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const
+ {
+ return m_pipeline;
+ }
+
+ explicit operator bool() const
+ {
+ return m_pipeline != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_pipeline == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipeline m_pipeline;
+ };
+
+ static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+
+ class PipelineLayout
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR PipelineLayout()
+ : m_pipelineLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t )
+ : m_pipelineLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout )
+ : m_pipelineLayout( pipelineLayout )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PipelineLayout & operator=(VkPipelineLayout pipelineLayout)
+ {
+ m_pipelineLayout = pipelineLayout;
+ return *this;
+ }
+#endif
+
+ PipelineLayout & operator=( std::nullptr_t )
+ {
+ m_pipelineLayout = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( PipelineLayout const & rhs ) const
+ {
+ return m_pipelineLayout == rhs.m_pipelineLayout;
+ }
+
+ bool operator!=(PipelineLayout const & rhs ) const
+ {
+ return m_pipelineLayout != rhs.m_pipelineLayout;
+ }
+
+ bool operator<(PipelineLayout const & rhs ) const
+ {
+ return m_pipelineLayout < rhs.m_pipelineLayout;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const
+ {
+ return m_pipelineLayout;
+ }
+
+ explicit operator bool() const
+ {
+ return m_pipelineLayout != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_pipelineLayout == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipelineLayout m_pipelineLayout;
+ };
+
+ static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+
+ class Sampler
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Sampler()
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t )
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler )
+ : m_sampler( sampler )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Sampler & operator=(VkSampler sampler)
+ {
+ m_sampler = sampler;
+ return *this;
+ }
+#endif
+
+ Sampler & operator=( std::nullptr_t )
+ {
+ m_sampler = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Sampler const & rhs ) const
+ {
+ return m_sampler == rhs.m_sampler;
+ }
+
+ bool operator!=(Sampler const & rhs ) const
+ {
+ return m_sampler != rhs.m_sampler;
+ }
+
+ bool operator<(Sampler const & rhs ) const
+ {
+ return m_sampler < rhs.m_sampler;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const
+ {
+ return m_sampler;
+ }
+
+ explicit operator bool() const
+ {
+ return m_sampler != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_sampler == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSampler m_sampler;
+ };
+
+ static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+ class DescriptorSet
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorSet()
+ : m_descriptorSet(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t )
+ : m_descriptorSet(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet )
+ : m_descriptorSet( descriptorSet )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorSet & operator=(VkDescriptorSet descriptorSet)
+ {
+ m_descriptorSet = descriptorSet;
+ return *this;
+ }
+#endif
+
+ DescriptorSet & operator=( std::nullptr_t )
+ {
+ m_descriptorSet = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DescriptorSet const & rhs ) const
+ {
+ return m_descriptorSet == rhs.m_descriptorSet;
+ }
+
+ bool operator!=(DescriptorSet const & rhs ) const
+ {
+ return m_descriptorSet != rhs.m_descriptorSet;
+ }
+
+ bool operator<(DescriptorSet const & rhs ) const
+ {
+ return m_descriptorSet < rhs.m_descriptorSet;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const
+ {
+ return m_descriptorSet;
+ }
+
+ explicit operator bool() const
+ {
+ return m_descriptorSet != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_descriptorSet == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorSet m_descriptorSet;
+ };
+
+ static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+ class DescriptorSetLayout
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayout()
+ : m_descriptorSetLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t )
+ : m_descriptorSetLayout(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout )
+ : m_descriptorSetLayout( descriptorSetLayout )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout)
+ {
+ m_descriptorSetLayout = descriptorSetLayout;
+ return *this;
+ }
+#endif
+
+ DescriptorSetLayout & operator=( std::nullptr_t )
+ {
+ m_descriptorSetLayout = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DescriptorSetLayout const & rhs ) const
+ {
+ return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+ }
+
+ bool operator!=(DescriptorSetLayout const & rhs ) const
+ {
+ return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+ }
+
+ bool operator<(DescriptorSetLayout const & rhs ) const
+ {
+ return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const
+ {
+ return m_descriptorSetLayout;
+ }
+
+ explicit operator bool() const
+ {
+ return m_descriptorSetLayout != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_descriptorSetLayout == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorSetLayout m_descriptorSetLayout;
+ };
+
+ static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+ class DescriptorPool
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorPool()
+ : m_descriptorPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t )
+ : m_descriptorPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool )
+ : m_descriptorPool( descriptorPool )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorPool & operator=(VkDescriptorPool descriptorPool)
+ {
+ m_descriptorPool = descriptorPool;
+ return *this;
+ }
+#endif
+
+ DescriptorPool & operator=( std::nullptr_t )
+ {
+ m_descriptorPool = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DescriptorPool const & rhs ) const
+ {
+ return m_descriptorPool == rhs.m_descriptorPool;
+ }
+
+ bool operator!=(DescriptorPool const & rhs ) const
+ {
+ return m_descriptorPool != rhs.m_descriptorPool;
+ }
+
+ bool operator<(DescriptorPool const & rhs ) const
+ {
+ return m_descriptorPool < rhs.m_descriptorPool;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const
+ {
+ return m_descriptorPool;
+ }
+
+ explicit operator bool() const
+ {
+ return m_descriptorPool != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_descriptorPool == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorPool m_descriptorPool;
+ };
+
+ static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+ class Fence
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Fence()
+ : m_fence(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t )
+ : m_fence(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence )
+ : m_fence( fence )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Fence & operator=(VkFence fence)
+ {
+ m_fence = fence;
+ return *this;
+ }
+#endif
+
+ Fence & operator=( std::nullptr_t )
+ {
+ m_fence = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Fence const & rhs ) const
+ {
+ return m_fence == rhs.m_fence;
+ }
+
+ bool operator!=(Fence const & rhs ) const
+ {
+ return m_fence != rhs.m_fence;
+ }
+
+ bool operator<(Fence const & rhs ) const
+ {
+ return m_fence < rhs.m_fence;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const
+ {
+ return m_fence;
+ }
+
+ explicit operator bool() const
+ {
+ return m_fence != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_fence == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkFence m_fence;
+ };
+
+ static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+
+ class Semaphore
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Semaphore()
+ : m_semaphore(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t )
+ : m_semaphore(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore )
+ : m_semaphore( semaphore )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Semaphore & operator=(VkSemaphore semaphore)
+ {
+ m_semaphore = semaphore;
+ return *this;
+ }
+#endif
+
+ Semaphore & operator=( std::nullptr_t )
+ {
+ m_semaphore = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Semaphore const & rhs ) const
+ {
+ return m_semaphore == rhs.m_semaphore;
+ }
+
+ bool operator!=(Semaphore const & rhs ) const
+ {
+ return m_semaphore != rhs.m_semaphore;
+ }
+
+ bool operator<(Semaphore const & rhs ) const
+ {
+ return m_semaphore < rhs.m_semaphore;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const
+ {
+ return m_semaphore;
+ }
+
+ explicit operator bool() const
+ {
+ return m_semaphore != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_semaphore == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSemaphore m_semaphore;
+ };
+
+ static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+
+ class Event
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Event()
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Event( std::nullptr_t )
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event )
+ : m_event( event )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Event & operator=(VkEvent event)
+ {
+ m_event = event;
+ return *this;
+ }
+#endif
+
+ Event & operator=( std::nullptr_t )
+ {
+ m_event = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Event const & rhs ) const
+ {
+ return m_event == rhs.m_event;
+ }
+
+ bool operator!=(Event const & rhs ) const
+ {
+ return m_event != rhs.m_event;
+ }
+
+ bool operator<(Event const & rhs ) const
+ {
+ return m_event < rhs.m_event;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const
+ {
+ return m_event;
+ }
+
+ explicit operator bool() const
+ {
+ return m_event != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_event == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkEvent m_event;
+ };
+
+ static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+ class QueryPool
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR QueryPool()
+ : m_queryPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t )
+ : m_queryPool(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool )
+ : m_queryPool( queryPool )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ QueryPool & operator=(VkQueryPool queryPool)
+ {
+ m_queryPool = queryPool;
+ return *this;
+ }
+#endif
+
+ QueryPool & operator=( std::nullptr_t )
+ {
+ m_queryPool = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( QueryPool const & rhs ) const
+ {
+ return m_queryPool == rhs.m_queryPool;
+ }
+
+ bool operator!=(QueryPool const & rhs ) const
+ {
+ return m_queryPool != rhs.m_queryPool;
+ }
+
+ bool operator<(QueryPool const & rhs ) const
+ {
+ return m_queryPool < rhs.m_queryPool;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const
+ {
+ return m_queryPool;
+ }
+
+ explicit operator bool() const
+ {
+ return m_queryPool != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_queryPool == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkQueryPool m_queryPool;
+ };
+
+ static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+ class Framebuffer
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Framebuffer()
+ : m_framebuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t )
+ : m_framebuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer )
+ : m_framebuffer( framebuffer )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Framebuffer & operator=(VkFramebuffer framebuffer)
+ {
+ m_framebuffer = framebuffer;
+ return *this;
+ }
+#endif
+
+ Framebuffer & operator=( std::nullptr_t )
+ {
+ m_framebuffer = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Framebuffer const & rhs ) const
+ {
+ return m_framebuffer == rhs.m_framebuffer;
+ }
+
+ bool operator!=(Framebuffer const & rhs ) const
+ {
+ return m_framebuffer != rhs.m_framebuffer;
+ }
+
+ bool operator<(Framebuffer const & rhs ) const
+ {
+ return m_framebuffer < rhs.m_framebuffer;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const
+ {
+ return m_framebuffer;
+ }
+
+ explicit operator bool() const
+ {
+ return m_framebuffer != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_framebuffer == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkFramebuffer m_framebuffer;
+ };
+
+ static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+
+ class RenderPass
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR RenderPass()
+ : m_renderPass(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t )
+ : m_renderPass(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass )
+ : m_renderPass( renderPass )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ RenderPass & operator=(VkRenderPass renderPass)
+ {
+ m_renderPass = renderPass;
+ return *this;
+ }
+#endif
+
+ RenderPass & operator=( std::nullptr_t )
+ {
+ m_renderPass = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( RenderPass const & rhs ) const
+ {
+ return m_renderPass == rhs.m_renderPass;
+ }
+
+ bool operator!=(RenderPass const & rhs ) const
+ {
+ return m_renderPass != rhs.m_renderPass;
+ }
+
+ bool operator<(RenderPass const & rhs ) const
+ {
+ return m_renderPass < rhs.m_renderPass;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const
+ {
+ return m_renderPass;
+ }
+
+ explicit operator bool() const
+ {
+ return m_renderPass != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_renderPass == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkRenderPass m_renderPass;
+ };
+
+ static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+
+ class PipelineCache
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR PipelineCache()
+ : m_pipelineCache(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t )
+ : m_pipelineCache(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache )
+ : m_pipelineCache( pipelineCache )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PipelineCache & operator=(VkPipelineCache pipelineCache)
+ {
+ m_pipelineCache = pipelineCache;
+ return *this;
+ }
+#endif
+
+ PipelineCache & operator=( std::nullptr_t )
+ {
+ m_pipelineCache = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( PipelineCache const & rhs ) const
+ {
+ return m_pipelineCache == rhs.m_pipelineCache;
+ }
+
+ bool operator!=(PipelineCache const & rhs ) const
+ {
+ return m_pipelineCache != rhs.m_pipelineCache;
+ }
+
+ bool operator<(PipelineCache const & rhs ) const
+ {
+ return m_pipelineCache < rhs.m_pipelineCache;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const
+ {
+ return m_pipelineCache;
+ }
+
+ explicit operator bool() const
+ {
+ return m_pipelineCache != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_pipelineCache == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPipelineCache m_pipelineCache;
+ };
+
+ static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
+ class ObjectTableNVX
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ObjectTableNVX()
+ : m_objectTableNVX(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t )
+ : m_objectTableNVX(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX )
+ : m_objectTableNVX( objectTableNVX )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX)
+ {
+ m_objectTableNVX = objectTableNVX;
+ return *this;
+ }
+#endif
+
+ ObjectTableNVX & operator=( std::nullptr_t )
+ {
+ m_objectTableNVX = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( ObjectTableNVX const & rhs ) const
+ {
+ return m_objectTableNVX == rhs.m_objectTableNVX;
+ }
+
+ bool operator!=(ObjectTableNVX const & rhs ) const
+ {
+ return m_objectTableNVX != rhs.m_objectTableNVX;
+ }
+
+ bool operator<(ObjectTableNVX const & rhs ) const
+ {
+ return m_objectTableNVX < rhs.m_objectTableNVX;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const
+ {
+ return m_objectTableNVX;
+ }
+
+ explicit operator bool() const
+ {
+ return m_objectTableNVX != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_objectTableNVX == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkObjectTableNVX m_objectTableNVX;
+ };
+
+ static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+ class IndirectCommandsLayoutNVX
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX()
+ : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t )
+ : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX )
+ : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+ {
+ m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+ return *this;
+ }
+#endif
+
+ IndirectCommandsLayoutNVX & operator=( std::nullptr_t )
+ {
+ m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( IndirectCommandsLayoutNVX const & rhs ) const
+ {
+ return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+ }
+
+ bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const
+ {
+ return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+ }
+
+ bool operator<(IndirectCommandsLayoutNVX const & rhs ) const
+ {
+ return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const
+ {
+ return m_indirectCommandsLayoutNVX;
+ }
+
+ explicit operator bool() const
+ {
+ return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+ };
+
+ static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
+ class DescriptorUpdateTemplate
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate()
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t )
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate )
+ : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate)
+ {
+ m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+ return *this;
+ }
+#endif
+
+ DescriptorUpdateTemplate & operator=( std::nullptr_t )
+ {
+ m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DescriptorUpdateTemplate const & rhs ) const
+ {
+ return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator!=(DescriptorUpdateTemplate const & rhs ) const
+ {
+ return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator<(DescriptorUpdateTemplate const & rhs ) const
+ {
+ return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const
+ {
+ return m_descriptorUpdateTemplate;
+ }
+
+ explicit operator bool() const
+ {
+ return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
+ };
+
+ static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+
+ using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
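// Editor's note: illustrative sketch, not part of the generated header. The *KHR
// alias above exists for source compatibility with code written against the
// extension name; it denotes the very same type, so no conversion is involved.
// The helper name is hypothetical.
inline bool khrAliasIsSameTypeExample( DescriptorUpdateTemplateKHR t )
{
  DescriptorUpdateTemplate same = t;   // identical types, plain copy
  return same == t;
}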
+
+ class SamplerYcbcrConversion
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion()
+ : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t )
+ : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion )
+ : m_samplerYcbcrConversion( samplerYcbcrConversion )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion)
+ {
+ m_samplerYcbcrConversion = samplerYcbcrConversion;
+ return *this;
+ }
+#endif
+
+ SamplerYcbcrConversion & operator=( std::nullptr_t )
+ {
+ m_samplerYcbcrConversion = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( SamplerYcbcrConversion const & rhs ) const
+ {
+ return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+ }
+
+ bool operator!=(SamplerYcbcrConversion const & rhs ) const
+ {
+ return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+ }
+
+ bool operator<(SamplerYcbcrConversion const & rhs ) const
+ {
+ return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const
+ {
+ return m_samplerYcbcrConversion;
+ }
+
+ explicit operator bool() const
+ {
+ return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSamplerYcbcrConversion m_samplerYcbcrConversion;
+ };
+
+ static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+
+ using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+
+ class ValidationCacheEXT
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ValidationCacheEXT()
+ : m_validationCacheEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t )
+ : m_validationCacheEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT )
+ : m_validationCacheEXT( validationCacheEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT)
+ {
+ m_validationCacheEXT = validationCacheEXT;
+ return *this;
+ }
+#endif
+
+ ValidationCacheEXT & operator=( std::nullptr_t )
+ {
+ m_validationCacheEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( ValidationCacheEXT const & rhs ) const
+ {
+ return m_validationCacheEXT == rhs.m_validationCacheEXT;
+ }
+
+ bool operator!=(ValidationCacheEXT const & rhs ) const
+ {
+ return m_validationCacheEXT != rhs.m_validationCacheEXT;
+ }
+
+ bool operator<(ValidationCacheEXT const & rhs ) const
+ {
+ return m_validationCacheEXT < rhs.m_validationCacheEXT;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const
+ {
+ return m_validationCacheEXT;
+ }
+
+ explicit operator bool() const
+ {
+ return m_validationCacheEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_validationCacheEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkValidationCacheEXT m_validationCacheEXT;
+ };
+
+ static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+
+ class DisplayKHR
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DisplayKHR()
+ : m_displayKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t )
+ : m_displayKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR )
+ : m_displayKHR( displayKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DisplayKHR & operator=(VkDisplayKHR displayKHR)
+ {
+ m_displayKHR = displayKHR;
+ return *this;
+ }
+#endif
+
+ DisplayKHR & operator=( std::nullptr_t )
+ {
+ m_displayKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DisplayKHR const & rhs ) const
+ {
+ return m_displayKHR == rhs.m_displayKHR;
+ }
+
+ bool operator!=(DisplayKHR const & rhs ) const
+ {
+ return m_displayKHR != rhs.m_displayKHR;
+ }
+
+ bool operator<(DisplayKHR const & rhs ) const
+ {
+ return m_displayKHR < rhs.m_displayKHR;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const
+ {
+ return m_displayKHR;
+ }
+
+ explicit operator bool() const
+ {
+ return m_displayKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_displayKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDisplayKHR m_displayKHR;
+ };
+
+ static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+
+ class DisplayModeKHR
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DisplayModeKHR()
+ : m_displayModeKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t )
+ : m_displayModeKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR )
+ : m_displayModeKHR( displayModeKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR)
+ {
+ m_displayModeKHR = displayModeKHR;
+ return *this;
+ }
+#endif
+
+ DisplayModeKHR & operator=( std::nullptr_t )
+ {
+ m_displayModeKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DisplayModeKHR const & rhs ) const
+ {
+ return m_displayModeKHR == rhs.m_displayModeKHR;
+ }
+
+ bool operator!=(DisplayModeKHR const & rhs ) const
+ {
+ return m_displayModeKHR != rhs.m_displayModeKHR;
+ }
+
+ bool operator<(DisplayModeKHR const & rhs ) const
+ {
+ return m_displayModeKHR < rhs.m_displayModeKHR;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const
+ {
+ return m_displayModeKHR;
+ }
+
+ explicit operator bool() const
+ {
+ return m_displayModeKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_displayModeKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDisplayModeKHR m_displayModeKHR;
+ };
+
+ static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+ class SurfaceKHR
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR SurfaceKHR()
+ : m_surfaceKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t )
+ : m_surfaceKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR )
+ : m_surfaceKHR( surfaceKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR)
+ {
+ m_surfaceKHR = surfaceKHR;
+ return *this;
+ }
+#endif
+
+ SurfaceKHR & operator=( std::nullptr_t )
+ {
+ m_surfaceKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( SurfaceKHR const & rhs ) const
+ {
+ return m_surfaceKHR == rhs.m_surfaceKHR;
+ }
+
+ bool operator!=(SurfaceKHR const & rhs ) const
+ {
+ return m_surfaceKHR != rhs.m_surfaceKHR;
+ }
+
+ bool operator<(SurfaceKHR const & rhs ) const
+ {
+ return m_surfaceKHR < rhs.m_surfaceKHR;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const
+ {
+ return m_surfaceKHR;
+ }
+
+ explicit operator bool() const
+ {
+ return m_surfaceKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_surfaceKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSurfaceKHR m_surfaceKHR;
+ };
+
+ static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+
+ class SwapchainKHR
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR SwapchainKHR()
+ : m_swapchainKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t )
+ : m_swapchainKHR(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR )
+ : m_swapchainKHR( swapchainKHR )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR)
+ {
+ m_swapchainKHR = swapchainKHR;
+ return *this;
+ }
+#endif
+
+ SwapchainKHR & operator=( std::nullptr_t )
+ {
+ m_swapchainKHR = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( SwapchainKHR const & rhs ) const
+ {
+ return m_swapchainKHR == rhs.m_swapchainKHR;
+ }
+
+ bool operator!=(SwapchainKHR const & rhs ) const
+ {
+ return m_swapchainKHR != rhs.m_swapchainKHR;
+ }
+
+ bool operator<(SwapchainKHR const & rhs ) const
+ {
+ return m_swapchainKHR < rhs.m_swapchainKHR;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const
+ {
+ return m_swapchainKHR;
+ }
+
+ explicit operator bool() const
+ {
+ return m_swapchainKHR != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_swapchainKHR == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSwapchainKHR m_swapchainKHR;
+ };
+
+ static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+
+ class DebugReportCallbackEXT
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT()
+ : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t )
+ : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT )
+ : m_debugReportCallbackEXT( debugReportCallbackEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
+ {
+ m_debugReportCallbackEXT = debugReportCallbackEXT;
+ return *this;
+ }
+#endif
+
+ DebugReportCallbackEXT & operator=( std::nullptr_t )
+ {
+ m_debugReportCallbackEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DebugReportCallbackEXT const & rhs ) const
+ {
+ return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+ }
+
+ bool operator!=(DebugReportCallbackEXT const & rhs ) const
+ {
+ return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+ }
+
+ bool operator<(DebugReportCallbackEXT const & rhs ) const
+ {
+ return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const
+ {
+ return m_debugReportCallbackEXT;
+ }
+
+ explicit operator bool() const
+ {
+ return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+ };
+
+ static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
+ class DebugUtilsMessengerEXT
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT()
+ : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t )
+ : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT )
+ : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT)
+ {
+ m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+ return *this;
+ }
+#endif
+
+ DebugUtilsMessengerEXT & operator=( std::nullptr_t )
+ {
+ m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( DebugUtilsMessengerEXT const & rhs ) const
+ {
+ return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+ }
+
+ bool operator!=(DebugUtilsMessengerEXT const & rhs ) const
+ {
+ return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+ }
+
+ bool operator<(DebugUtilsMessengerEXT const & rhs ) const
+ {
+ return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+ }
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const
+ {
+ return m_debugUtilsMessengerEXT;
+ }
+
+ explicit operator bool() const
+ {
+ return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
+ };
+
+ static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
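// Editor's note: illustrative sketch, not part of the generated header. The
// sizeof static_asserts accompanying each handle class are what make it safe to
// reinterpret an array of wrapper handles as an array of raw handles when calling
// a C entry point directly. The helper name is hypothetical.
inline VkResult resetFencesExample( VkDevice device, const Fence* fences, uint32_t count )
{
  // layout-compatible: sizeof( Fence ) == sizeof( VkFence ), checked above
  return ::vkResetFences( device, count, reinterpret_cast<const VkFence*>( fences ) );
}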
+
+ struct Offset2D
+ {
+ Offset2D( int32_t x_ = 0, int32_t y_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ {
+ }
+
+ Offset2D( VkOffset2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Offset2D ) );
+ }
+
+ Offset2D& operator=( VkOffset2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Offset2D ) );
+ return *this;
+ }
+ Offset2D& setX( int32_t x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ Offset2D& setY( int32_t y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ operator const VkOffset2D&() const
+ {
+ return *reinterpret_cast<const VkOffset2D*>(this);
+ }
+
+ bool operator==( Offset2D const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y );
+ }
+
+ bool operator!=( Offset2D const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ int32_t x;
+ int32_t y;
+ };
+ static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
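// Editor's note: illustrative sketch, not part of the generated header. The
// struct wrappers mirror the C structs bit for bit and add chainable setters plus
// an implicit conversion back to the C type. The helper name is hypothetical.
inline VkOffset2D makeOffsetExample( int32_t x, int32_t y )
{
  Offset2D o;
  o.setX( x ).setY( y );   // each setter returns *this, so calls chain
  return o;                // copies out via operator const VkOffset2D&
}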
+
+ struct Offset3D
+ {
+ Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ , z( z_ )
+ {
+ }
+
+ Offset3D( VkOffset3D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Offset3D ) );
+ }
+
+ Offset3D& operator=( VkOffset3D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Offset3D ) );
+ return *this;
+ }
+ Offset3D& setX( int32_t x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ Offset3D& setY( int32_t y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ Offset3D& setZ( int32_t z_ )
+ {
+ z = z_;
+ return *this;
+ }
+
+ operator const VkOffset3D&() const
+ {
+ return *reinterpret_cast<const VkOffset3D*>(this);
+ }
+
+ bool operator==( Offset3D const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( z == rhs.z );
+ }
+
+ bool operator!=( Offset3D const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ int32_t x;
+ int32_t y;
+ int32_t z;
+ };
+ static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+
+ struct Extent2D
+ {
+ Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 )
+ : width( width_ )
+ , height( height_ )
+ {
+ }
+
+ Extent2D( VkExtent2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Extent2D ) );
+ }
+
+ Extent2D& operator=( VkExtent2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Extent2D ) );
+ return *this;
+ }
+ Extent2D& setWidth( uint32_t width_ )
+ {
+ width = width_;
+ return *this;
+ }
+
+ Extent2D& setHeight( uint32_t height_ )
+ {
+ height = height_;
+ return *this;
+ }
+
+ operator const VkExtent2D&() const
+ {
+ return *reinterpret_cast<const VkExtent2D*>(this);
+ }
+
+ bool operator==( Extent2D const& rhs ) const
+ {
+ return ( width == rhs.width )
+ && ( height == rhs.height );
+ }
+
+ bool operator!=( Extent2D const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t width;
+ uint32_t height;
+ };
+ static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+
+ struct Extent3D
+ {
+ Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 )
+ : width( width_ )
+ , height( height_ )
+ , depth( depth_ )
+ {
+ }
+
+ Extent3D( VkExtent3D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Extent3D ) );
+ }
+
+ Extent3D& operator=( VkExtent3D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Extent3D ) );
+ return *this;
+ }
+ Extent3D& setWidth( uint32_t width_ )
+ {
+ width = width_;
+ return *this;
+ }
+
+ Extent3D& setHeight( uint32_t height_ )
+ {
+ height = height_;
+ return *this;
+ }
+
+ Extent3D& setDepth( uint32_t depth_ )
+ {
+ depth = depth_;
+ return *this;
+ }
+
+ operator const VkExtent3D&() const
+ {
+ return *reinterpret_cast<const VkExtent3D*>(this);
+ }
+
+ bool operator==( Extent3D const& rhs ) const
+ {
+ return ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( depth == rhs.depth );
+ }
+
+ bool operator!=( Extent3D const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t width;
+ uint32_t height;
+ uint32_t depth;
+ };
+ static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+
+ struct Viewport
+ {
+ Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ , width( width_ )
+ , height( height_ )
+ , minDepth( minDepth_ )
+ , maxDepth( maxDepth_ )
+ {
+ }
+
+ Viewport( VkViewport const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Viewport ) );
+ }
+
+ Viewport& operator=( VkViewport const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Viewport ) );
+ return *this;
+ }
+ Viewport& setX( float x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ Viewport& setY( float y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ Viewport& setWidth( float width_ )
+ {
+ width = width_;
+ return *this;
+ }
+
+ Viewport& setHeight( float height_ )
+ {
+ height = height_;
+ return *this;
+ }
+
+ Viewport& setMinDepth( float minDepth_ )
+ {
+ minDepth = minDepth_;
+ return *this;
+ }
+
+ Viewport& setMaxDepth( float maxDepth_ )
+ {
+ maxDepth = maxDepth_;
+ return *this;
+ }
+
+ operator const VkViewport&() const
+ {
+ return *reinterpret_cast<const VkViewport*>(this);
+ }
+
+ bool operator==( Viewport const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( minDepth == rhs.minDepth )
+ && ( maxDepth == rhs.maxDepth );
+ }
+
+ bool operator!=( Viewport const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ float x;
+ float y;
+ float width;
+ float height;
+ float minDepth;
+ float maxDepth;
+ };
+ static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+
+ struct Rect2D
+ {
+ Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() )
+ : offset( offset_ )
+ , extent( extent_ )
+ {
+ }
+
+ Rect2D( VkRect2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Rect2D ) );
+ }
+
+ Rect2D& operator=( VkRect2D const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Rect2D ) );
+ return *this;
+ }
+ Rect2D& setOffset( Offset2D offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ Rect2D& setExtent( Extent2D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ operator const VkRect2D&() const
+ {
+ return *reinterpret_cast<const VkRect2D*>(this);
+ }
+
+ bool operator==( Rect2D const& rhs ) const
+ {
+ return ( offset == rhs.offset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( Rect2D const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Offset2D offset;
+ Extent2D extent;
+ };
+ static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+
+ struct ClearRect
+ {
+ ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+ : rect( rect_ )
+ , baseArrayLayer( baseArrayLayer_ )
+ , layerCount( layerCount_ )
+ {
+ }
+
+ ClearRect( VkClearRect const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearRect ) );
+ }
+
+ ClearRect& operator=( VkClearRect const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearRect ) );
+ return *this;
+ }
+ ClearRect& setRect( Rect2D rect_ )
+ {
+ rect = rect_;
+ return *this;
+ }
+
+ ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+ {
+ baseArrayLayer = baseArrayLayer_;
+ return *this;
+ }
+
+ ClearRect& setLayerCount( uint32_t layerCount_ )
+ {
+ layerCount = layerCount_;
+ return *this;
+ }
+
+ operator const VkClearRect&() const
+ {
+ return *reinterpret_cast<const VkClearRect*>(this);
+ }
+
+ bool operator==( ClearRect const& rhs ) const
+ {
+ return ( rect == rhs.rect )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
+ }
+
+ bool operator!=( ClearRect const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Rect2D rect;
+ uint32_t baseArrayLayer;
+ uint32_t layerCount;
+ };
+ static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+
+ struct ExtensionProperties
+ {
+ operator const VkExtensionProperties&() const
+ {
+ return *reinterpret_cast<const VkExtensionProperties*>(this);
+ }
+
+ bool operator==( ExtensionProperties const& rhs ) const
+ {
+ return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( specVersion == rhs.specVersion );
+ }
+
+ bool operator!=( ExtensionProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+ uint32_t specVersion;
+ };
+ static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+
+ struct LayerProperties
+ {
+ operator const VkLayerProperties&() const
+ {
+ return *reinterpret_cast<const VkLayerProperties*>(this);
+ }
+
+ bool operator==( LayerProperties const& rhs ) const
+ {
+ return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( specVersion == rhs.specVersion )
+ && ( implementationVersion == rhs.implementationVersion )
+ && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+ }
+
+ bool operator!=( LayerProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ char layerName[VK_MAX_EXTENSION_NAME_SIZE];
+ uint32_t specVersion;
+ uint32_t implementationVersion;
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ };
+ static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
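+
+ // Editor's note (illustrative sketch, not part of the upstream header): ExtensionProperties
+ // and LayerProperties are read-only result structs, typically filled by the enumeration
+ // helpers declared later in this header. Assuming the default configuration (exceptions
+ // enabled), usage looks roughly like:
+ //
+ //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
+ //   std::vector<vk::LayerProperties>     layers     = vk::enumerateInstanceLayerProperties();
+ //   for ( auto const & e : extensions )
+ //   {
+ //     printf( "%s (spec %u)\n", e.extensionName, e.specVersion );
+ //   }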
+
+ struct AllocationCallbacks
+ {
+ AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
+ : pUserData( pUserData_ )
+ , pfnAllocation( pfnAllocation_ )
+ , pfnReallocation( pfnReallocation_ )
+ , pfnFree( pfnFree_ )
+ , pfnInternalAllocation( pfnInternalAllocation_ )
+ , pfnInternalFree( pfnInternalFree_ )
+ {
+ }
+
+ AllocationCallbacks( VkAllocationCallbacks const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
+ }
+
+ AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
+ return *this;
+ }
+ AllocationCallbacks& setPUserData( void* pUserData_ )
+ {
+ pUserData = pUserData_;
+ return *this;
+ }
+
+ AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
+ {
+ pfnAllocation = pfnAllocation_;
+ return *this;
+ }
+
+ AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
+ {
+ pfnReallocation = pfnReallocation_;
+ return *this;
+ }
+
+ AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
+ {
+ pfnFree = pfnFree_;
+ return *this;
+ }
+
+ AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
+ {
+ pfnInternalAllocation = pfnInternalAllocation_;
+ return *this;
+ }
+
+ AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
+ {
+ pfnInternalFree = pfnInternalFree_;
+ return *this;
+ }
+
+ operator const VkAllocationCallbacks&() const
+ {
+ return *reinterpret_cast<const VkAllocationCallbacks*>(this);
+ }
+
+ bool operator==( AllocationCallbacks const& rhs ) const
+ {
+ return ( pUserData == rhs.pUserData )
+ && ( pfnAllocation == rhs.pfnAllocation )
+ && ( pfnReallocation == rhs.pfnReallocation )
+ && ( pfnFree == rhs.pfnFree )
+ && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+ && ( pfnInternalFree == rhs.pfnInternalFree );
+ }
+
+ bool operator!=( AllocationCallbacks const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ void* pUserData;
+ PFN_vkAllocationFunction pfnAllocation;
+ PFN_vkReallocationFunction pfnReallocation;
+ PFN_vkFreeFunction pfnFree;
+ PFN_vkInternalAllocationNotification pfnInternalAllocation;
+ PFN_vkInternalFreeNotification pfnInternalFree;
+ };
+ static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
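+
+ // Editor's note (illustrative sketch, not part of the upstream header): AllocationCallbacks
+ // mirrors VkAllocationCallbacks, so the hooks must match the PFN_vkAllocationFunction /
+ // PFN_vkFreeFunction signatures from the C API. A minimal pass-through allocator might be:
+ //
+ //   VKAPI_ATTR void* VKAPI_CALL myAlloc( void* /*pUserData*/, size_t size, size_t /*alignment*/,
+ //                                        VkSystemAllocationScope /*scope*/ )
+ //   {
+ //     return malloc( size );   // a real allocator must honour the requested alignment
+ //   }
+ //   VKAPI_ATTR void VKAPI_CALL myFree( void* /*pUserData*/, void* pMemory ) { free( pMemory ); }
+ //
+ //   vk::AllocationCallbacks callbacks = vk::AllocationCallbacks()
+ //     .setPfnAllocation( &myAlloc )
+ //     .setPfnFree( &myFree );
+ //   // NOTE: the API also requires pfnReallocation to be set; omitted here for brevity.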
+
+ struct MemoryRequirements
+ {
+ operator const VkMemoryRequirements&() const
+ {
+ return *reinterpret_cast<const VkMemoryRequirements*>(this);
+ }
+
+ bool operator==( MemoryRequirements const& rhs ) const
+ {
+ return ( size == rhs.size )
+ && ( alignment == rhs.alignment )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryRequirements const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize size;
+ DeviceSize alignment;
+ uint32_t memoryTypeBits;
+ };
+ static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+
+ struct DescriptorBufferInfo
+ {
+ DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 )
+ : buffer( buffer_ )
+ , offset( offset_ )
+ , range( range_ )
+ {
+ }
+
+ DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
+ }
+
+ DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
+ return *this;
+ }
+ DescriptorBufferInfo& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ DescriptorBufferInfo& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ DescriptorBufferInfo& setRange( DeviceSize range_ )
+ {
+ range = range_;
+ return *this;
+ }
+
+ operator const VkDescriptorBufferInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
+ }
+
+ bool operator==( DescriptorBufferInfo const& rhs ) const
+ {
+ return ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( range == rhs.range );
+ }
+
+ bool operator!=( DescriptorBufferInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Buffer buffer;
+ DeviceSize offset;
+ DeviceSize range;
+ };
+ static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+
+ struct SubresourceLayout
+ {
+ operator const VkSubresourceLayout&() const
+ {
+ return *reinterpret_cast<const VkSubresourceLayout*>(this);
+ }
+
+ bool operator==( SubresourceLayout const& rhs ) const
+ {
+ return ( offset == rhs.offset )
+ && ( size == rhs.size )
+ && ( rowPitch == rhs.rowPitch )
+ && ( arrayPitch == rhs.arrayPitch )
+ && ( depthPitch == rhs.depthPitch );
+ }
+
+ bool operator!=( SubresourceLayout const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize offset;
+ DeviceSize size;
+ DeviceSize rowPitch;
+ DeviceSize arrayPitch;
+ DeviceSize depthPitch;
+ };
+ static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+
+ struct BufferCopy
+ {
+ BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 )
+ : srcOffset( srcOffset_ )
+ , dstOffset( dstOffset_ )
+ , size( size_ )
+ {
+ }
+
+ BufferCopy( VkBufferCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferCopy ) );
+ }
+
+ BufferCopy& operator=( VkBufferCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferCopy ) );
+ return *this;
+ }
+ BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ BufferCopy& setDstOffset( DeviceSize dstOffset_ )
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ BufferCopy& setSize( DeviceSize size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ operator const VkBufferCopy&() const
+ {
+ return *reinterpret_cast<const VkBufferCopy*>(this);
+ }
+
+ bool operator==( BufferCopy const& rhs ) const
+ {
+ return ( srcOffset == rhs.srcOffset )
+ && ( dstOffset == rhs.dstOffset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( BufferCopy const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize srcOffset;
+ DeviceSize dstOffset;
+ DeviceSize size;
+ };
+ static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+
+ struct SpecializationMapEntry
+ {
+ SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 )
+ : constantID( constantID_ )
+ , offset( offset_ )
+ , size( size_ )
+ {
+ }
+
+ SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
+ }
+
+ SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
+ return *this;
+ }
+ SpecializationMapEntry& setConstantID( uint32_t constantID_ )
+ {
+ constantID = constantID_;
+ return *this;
+ }
+
+ SpecializationMapEntry& setOffset( uint32_t offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ SpecializationMapEntry& setSize( size_t size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ operator const VkSpecializationMapEntry&() const
+ {
+ return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
+ }
+
+ bool operator==( SpecializationMapEntry const& rhs ) const
+ {
+ return ( constantID == rhs.constantID )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( SpecializationMapEntry const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t constantID;
+ uint32_t offset;
+ size_t size;
+ };
+ static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+
+ struct SpecializationInfo
+ {
+ SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr )
+ : mapEntryCount( mapEntryCount_ )
+ , pMapEntries( pMapEntries_ )
+ , dataSize( dataSize_ )
+ , pData( pData_ )
+ {
+ }
+
+ SpecializationInfo( VkSpecializationInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+ }
+
+ SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+ return *this;
+ }
+ SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
+ {
+ mapEntryCount = mapEntryCount_;
+ return *this;
+ }
+
+ SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
+ {
+ pMapEntries = pMapEntries_;
+ return *this;
+ }
+
+ SpecializationInfo& setDataSize( size_t dataSize_ )
+ {
+ dataSize = dataSize_;
+ return *this;
+ }
+
+ SpecializationInfo& setPData( const void* pData_ )
+ {
+ pData = pData_;
+ return *this;
+ }
+
+ operator const VkSpecializationInfo&() const
+ {
+ return *reinterpret_cast<const VkSpecializationInfo*>(this);
+ }
+
+ bool operator==( SpecializationInfo const& rhs ) const
+ {
+ return ( mapEntryCount == rhs.mapEntryCount )
+ && ( pMapEntries == rhs.pMapEntries )
+ && ( dataSize == rhs.dataSize )
+ && ( pData == rhs.pData );
+ }
+
+ bool operator!=( SpecializationInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t mapEntryCount;
+ const SpecializationMapEntry* pMapEntries;
+ size_t dataSize;
+ const void* pData;
+ };
+ static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
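+
+ // Editor's note (illustrative sketch, not part of the upstream header): SpecializationInfo
+ // points at an array of SpecializationMapEntry plus a raw data blob; each entry maps a
+ // SPIR-V constant_id to an offset/size inside that blob. Something along these lines:
+ //
+ //   struct SpecData { uint32_t workGroupSize; float scale; };
+ //   SpecData data = { 64, 0.5f };
+ //   vk::SpecializationMapEntry entries[] = {
+ //     vk::SpecializationMapEntry( 0, offsetof( SpecData, workGroupSize ), sizeof( uint32_t ) ),
+ //     vk::SpecializationMapEntry( 1, offsetof( SpecData, scale ),         sizeof( float ) )
+ //   };
+ //   vk::SpecializationInfo specInfo( 2, entries, sizeof( data ), &data );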
+
+ union ClearColorValue
+ {
+ ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
+ {
+ memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+ }
+
+ ClearColorValue( const std::array<int32_t,4>& int32_ )
+ {
+ memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+ }
+
+ ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+ {
+ memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+ }
+
+ ClearColorValue& setFloat32( std::array<float,4> float32_ )
+ {
+ memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+ return *this;
+ }
+
+ ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
+ {
+ memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+ return *this;
+ }
+
+ ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
+ {
+ memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+ return *this;
+ }
+
+ operator VkClearColorValue const& () const
+ {
+ return *reinterpret_cast<const VkClearColorValue*>(this);
+ }
+
+ float float32[4];
+ int32_t int32[4];
+ uint32_t uint32[4];
+ };
+
+ struct ClearDepthStencilValue
+ {
+ ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 )
+ : depth( depth_ )
+ , stencil( stencil_ )
+ {
+ }
+
+ ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+ }
+
+ ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+ return *this;
+ }
+ ClearDepthStencilValue& setDepth( float depth_ )
+ {
+ depth = depth_;
+ return *this;
+ }
+
+ ClearDepthStencilValue& setStencil( uint32_t stencil_ )
+ {
+ stencil = stencil_;
+ return *this;
+ }
+
+ operator const VkClearDepthStencilValue&() const
+ {
+ return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
+ }
+
+ bool operator==( ClearDepthStencilValue const& rhs ) const
+ {
+ return ( depth == rhs.depth )
+ && ( stencil == rhs.stencil );
+ }
+
+ bool operator!=( ClearDepthStencilValue const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ float depth;
+ uint32_t stencil;
+ };
+ static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+
+ union ClearValue
+ {
+ ClearValue( ClearColorValue color_ = ClearColorValue() )
+ {
+ color = color_;
+ }
+
+ ClearValue( ClearDepthStencilValue depthStencil_ )
+ {
+ depthStencil = depthStencil_;
+ }
+
+ ClearValue& setColor( ClearColorValue color_ )
+ {
+ color = color_;
+ return *this;
+ }
+
+ ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
+ {
+ depthStencil = depthStencil_;
+ return *this;
+ }
+
+ operator VkClearValue const& () const
+ {
+ return *reinterpret_cast<const VkClearValue*>(this);
+ }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ ClearColorValue color;
+ ClearDepthStencilValue depthStencil;
+#else
+ VkClearColorValue color;
+ VkClearDepthStencilValue depthStencil;
+#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ };
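+
+ // Editor's note (illustrative sketch, not part of the upstream header): ClearValue is a
+ // union -- exactly one of color / depthStencil is meaningful, chosen by the attachment
+ // being cleared. Typical values:
+ //
+ //   vk::ClearValue clearColor( vk::ClearColorValue( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
+ //   vk::ClearValue clearDepth( vk::ClearDepthStencilValue( 1.0f, 0 ) );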
+
+ struct PhysicalDeviceFeatures
+ {
+ PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 )
+ : robustBufferAccess( robustBufferAccess_ )
+ , fullDrawIndexUint32( fullDrawIndexUint32_ )
+ , imageCubeArray( imageCubeArray_ )
+ , independentBlend( independentBlend_ )
+ , geometryShader( geometryShader_ )
+ , tessellationShader( tessellationShader_ )
+ , sampleRateShading( sampleRateShading_ )
+ , dualSrcBlend( dualSrcBlend_ )
+ , logicOp( logicOp_ )
+ , multiDrawIndirect( multiDrawIndirect_ )
+ , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
+ , depthClamp( depthClamp_ )
+ , depthBiasClamp( depthBiasClamp_ )
+ , fillModeNonSolid( fillModeNonSolid_ )
+ , depthBounds( depthBounds_ )
+ , wideLines( wideLines_ )
+ , largePoints( largePoints_ )
+ , alphaToOne( alphaToOne_ )
+ , multiViewport( multiViewport_ )
+ , samplerAnisotropy( samplerAnisotropy_ )
+ , textureCompressionETC2( textureCompressionETC2_ )
+ , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
+ , textureCompressionBC( textureCompressionBC_ )
+ , occlusionQueryPrecise( occlusionQueryPrecise_ )
+ , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
+ , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
+ , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
+ , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
+ , shaderImageGatherExtended( shaderImageGatherExtended_ )
+ , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
+ , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
+ , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
+ , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
+ , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
+ , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
+ , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
+ , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
+ , shaderClipDistance( shaderClipDistance_ )
+ , shaderCullDistance( shaderCullDistance_ )
+ , shaderFloat64( shaderFloat64_ )
+ , shaderInt64( shaderInt64_ )
+ , shaderInt16( shaderInt16_ )
+ , shaderResourceResidency( shaderResourceResidency_ )
+ , shaderResourceMinLod( shaderResourceMinLod_ )
+ , sparseBinding( sparseBinding_ )
+ , sparseResidencyBuffer( sparseResidencyBuffer_ )
+ , sparseResidencyImage2D( sparseResidencyImage2D_ )
+ , sparseResidencyImage3D( sparseResidencyImage3D_ )
+ , sparseResidency2Samples( sparseResidency2Samples_ )
+ , sparseResidency4Samples( sparseResidency4Samples_ )
+ , sparseResidency8Samples( sparseResidency8Samples_ )
+ , sparseResidency16Samples( sparseResidency16Samples_ )
+ , sparseResidencyAliased( sparseResidencyAliased_ )
+ , variableMultisampleRate( variableMultisampleRate_ )
+ , inheritedQueries( inheritedQueries_ )
+ {
+ }
+
+ PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
+ }
+
+ PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
+ {
+ robustBufferAccess = robustBufferAccess_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
+ {
+ fullDrawIndexUint32 = fullDrawIndexUint32_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
+ {
+ imageCubeArray = imageCubeArray_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
+ {
+ independentBlend = independentBlend_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
+ {
+ geometryShader = geometryShader_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
+ {
+ tessellationShader = tessellationShader_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
+ {
+ sampleRateShading = sampleRateShading_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
+ {
+ dualSrcBlend = dualSrcBlend_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
+ {
+ logicOp = logicOp_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
+ {
+ multiDrawIndirect = multiDrawIndirect_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
+ {
+ drawIndirectFirstInstance = drawIndirectFirstInstance_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
+ {
+ depthClamp = depthClamp_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
+ {
+ depthBiasClamp = depthBiasClamp_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
+ {
+ fillModeNonSolid = fillModeNonSolid_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
+ {
+ depthBounds = depthBounds_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
+ {
+ wideLines = wideLines_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
+ {
+ largePoints = largePoints_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
+ {
+ alphaToOne = alphaToOne_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
+ {
+ multiViewport = multiViewport_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
+ {
+ samplerAnisotropy = samplerAnisotropy_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
+ {
+ textureCompressionETC2 = textureCompressionETC2_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
+ {
+ textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
+ {
+ textureCompressionBC = textureCompressionBC_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
+ {
+ occlusionQueryPrecise = occlusionQueryPrecise_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
+ {
+ pipelineStatisticsQuery = pipelineStatisticsQuery_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
+ {
+ vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
+ {
+ fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
+ {
+ shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
+ {
+ shaderImageGatherExtended = shaderImageGatherExtended_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
+ {
+ shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
+ {
+ shaderStorageImageMultisample = shaderStorageImageMultisample_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
+ {
+ shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
+ {
+ shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
+ {
+ shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
+ {
+ shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
+ {
+ shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
+ {
+ shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
+ {
+ shaderClipDistance = shaderClipDistance_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
+ {
+ shaderCullDistance = shaderCullDistance_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
+ {
+ shaderFloat64 = shaderFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
+ {
+ shaderInt64 = shaderInt64_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
+ {
+ shaderInt16 = shaderInt16_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
+ {
+ shaderResourceResidency = shaderResourceResidency_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
+ {
+ shaderResourceMinLod = shaderResourceMinLod_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
+ {
+ sparseBinding = sparseBinding_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
+ {
+ sparseResidencyBuffer = sparseResidencyBuffer_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
+ {
+ sparseResidencyImage2D = sparseResidencyImage2D_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
+ {
+ sparseResidencyImage3D = sparseResidencyImage3D_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
+ {
+ sparseResidency2Samples = sparseResidency2Samples_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
+ {
+ sparseResidency4Samples = sparseResidency4Samples_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
+ {
+ sparseResidency8Samples = sparseResidency8Samples_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
+ {
+ sparseResidency16Samples = sparseResidency16Samples_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
+ {
+ sparseResidencyAliased = sparseResidencyAliased_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
+ {
+ variableMultisampleRate = variableMultisampleRate_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
+ {
+ inheritedQueries = inheritedQueries_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceFeatures const& rhs ) const
+ {
+ return ( robustBufferAccess == rhs.robustBufferAccess )
+ && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+ && ( imageCubeArray == rhs.imageCubeArray )
+ && ( independentBlend == rhs.independentBlend )
+ && ( geometryShader == rhs.geometryShader )
+ && ( tessellationShader == rhs.tessellationShader )
+ && ( sampleRateShading == rhs.sampleRateShading )
+ && ( dualSrcBlend == rhs.dualSrcBlend )
+ && ( logicOp == rhs.logicOp )
+ && ( multiDrawIndirect == rhs.multiDrawIndirect )
+ && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+ && ( depthClamp == rhs.depthClamp )
+ && ( depthBiasClamp == rhs.depthBiasClamp )
+ && ( fillModeNonSolid == rhs.fillModeNonSolid )
+ && ( depthBounds == rhs.depthBounds )
+ && ( wideLines == rhs.wideLines )
+ && ( largePoints == rhs.largePoints )
+ && ( alphaToOne == rhs.alphaToOne )
+ && ( multiViewport == rhs.multiViewport )
+ && ( samplerAnisotropy == rhs.samplerAnisotropy )
+ && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+ && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+ && ( textureCompressionBC == rhs.textureCompressionBC )
+ && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+ && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+ && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+ && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+ && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+ && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+ && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+ && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+ && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+ && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+ && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+ && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+ && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+ && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+ && ( shaderClipDistance == rhs.shaderClipDistance )
+ && ( shaderCullDistance == rhs.shaderCullDistance )
+ && ( shaderFloat64 == rhs.shaderFloat64 )
+ && ( shaderInt64 == rhs.shaderInt64 )
+ && ( shaderInt16 == rhs.shaderInt16 )
+ && ( shaderResourceResidency == rhs.shaderResourceResidency )
+ && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+ && ( sparseBinding == rhs.sparseBinding )
+ && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+ && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+ && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+ && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+ && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+ && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+ && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+ && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+ && ( variableMultisampleRate == rhs.variableMultisampleRate )
+ && ( inheritedQueries == rhs.inheritedQueries );
+ }
+
+ bool operator!=( PhysicalDeviceFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Bool32 robustBufferAccess;
+ Bool32 fullDrawIndexUint32;
+ Bool32 imageCubeArray;
+ Bool32 independentBlend;
+ Bool32 geometryShader;
+ Bool32 tessellationShader;
+ Bool32 sampleRateShading;
+ Bool32 dualSrcBlend;
+ Bool32 logicOp;
+ Bool32 multiDrawIndirect;
+ Bool32 drawIndirectFirstInstance;
+ Bool32 depthClamp;
+ Bool32 depthBiasClamp;
+ Bool32 fillModeNonSolid;
+ Bool32 depthBounds;
+ Bool32 wideLines;
+ Bool32 largePoints;
+ Bool32 alphaToOne;
+ Bool32 multiViewport;
+ Bool32 samplerAnisotropy;
+ Bool32 textureCompressionETC2;
+ Bool32 textureCompressionASTC_LDR;
+ Bool32 textureCompressionBC;
+ Bool32 occlusionQueryPrecise;
+ Bool32 pipelineStatisticsQuery;
+ Bool32 vertexPipelineStoresAndAtomics;
+ Bool32 fragmentStoresAndAtomics;
+ Bool32 shaderTessellationAndGeometryPointSize;
+ Bool32 shaderImageGatherExtended;
+ Bool32 shaderStorageImageExtendedFormats;
+ Bool32 shaderStorageImageMultisample;
+ Bool32 shaderStorageImageReadWithoutFormat;
+ Bool32 shaderStorageImageWriteWithoutFormat;
+ Bool32 shaderUniformBufferArrayDynamicIndexing;
+ Bool32 shaderSampledImageArrayDynamicIndexing;
+ Bool32 shaderStorageBufferArrayDynamicIndexing;
+ Bool32 shaderStorageImageArrayDynamicIndexing;
+ Bool32 shaderClipDistance;
+ Bool32 shaderCullDistance;
+ Bool32 shaderFloat64;
+ Bool32 shaderInt64;
+ Bool32 shaderInt16;
+ Bool32 shaderResourceResidency;
+ Bool32 shaderResourceMinLod;
+ Bool32 sparseBinding;
+ Bool32 sparseResidencyBuffer;
+ Bool32 sparseResidencyImage2D;
+ Bool32 sparseResidencyImage3D;
+ Bool32 sparseResidency2Samples;
+ Bool32 sparseResidency4Samples;
+ Bool32 sparseResidency8Samples;
+ Bool32 sparseResidency16Samples;
+ Bool32 sparseResidencyAliased;
+ Bool32 variableMultisampleRate;
+ Bool32 inheritedQueries;
+ };
+ static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
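+
+ // Editor's note (illustrative sketch, not part of the upstream header): PhysicalDeviceFeatures
+ // is used both as an output (querying what a device supports) and as an input (requesting the
+ // subset of features to enable at device creation). A common pattern is:
+ //
+ //   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
+ //   vk::PhysicalDeviceFeatures enabled;                       // all fields VK_FALSE by default
+ //   enabled.setSamplerAnisotropy( supported.samplerAnisotropy )
+ //          .setFillModeNonSolid( supported.fillModeNonSolid );
+ //   // 'enabled' would then be referenced from DeviceCreateInfo::pEnabledFeatures.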
+
+ struct PhysicalDeviceSparseProperties
+ {
+ operator const VkPhysicalDeviceSparseProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
+ {
+ return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+ && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+ && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+ && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+ && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+ }
+
+ bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Bool32 residencyStandard2DBlockShape;
+ Bool32 residencyStandard2DMultisampleBlockShape;
+ Bool32 residencyStandard3DBlockShape;
+ Bool32 residencyAlignedMipSize;
+ Bool32 residencyNonResidentStrict;
+ };
+ static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+
+ struct DrawIndirectCommand
+ {
+ DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 )
+ : vertexCount( vertexCount_ )
+ , instanceCount( instanceCount_ )
+ , firstVertex( firstVertex_ )
+ , firstInstance( firstInstance_ )
+ {
+ }
+
+ DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+ }
+
+ DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+ return *this;
+ }
+ DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
+ {
+ vertexCount = vertexCount_;
+ return *this;
+ }
+
+ DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
+ {
+ instanceCount = instanceCount_;
+ return *this;
+ }
+
+ DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
+ {
+ firstVertex = firstVertex_;
+ return *this;
+ }
+
+ DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
+ {
+ firstInstance = firstInstance_;
+ return *this;
+ }
+
+ operator const VkDrawIndirectCommand&() const
+ {
+ return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
+ }
+
+ bool operator==( DrawIndirectCommand const& rhs ) const
+ {
+ return ( vertexCount == rhs.vertexCount )
+ && ( instanceCount == rhs.instanceCount )
+ && ( firstVertex == rhs.firstVertex )
+ && ( firstInstance == rhs.firstInstance );
+ }
+
+ bool operator!=( DrawIndirectCommand const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t vertexCount;
+ uint32_t instanceCount;
+ uint32_t firstVertex;
+ uint32_t firstInstance;
+ };
+ static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+
+ struct DrawIndexedIndirectCommand
+ {
+ DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 )
+ : indexCount( indexCount_ )
+ , instanceCount( instanceCount_ )
+ , firstIndex( firstIndex_ )
+ , vertexOffset( vertexOffset_ )
+ , firstInstance( firstInstance_ )
+ {
+ }
+
+ DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
+ }
+
+ DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
+ return *this;
+ }
+ DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
+ {
+ indexCount = indexCount_;
+ return *this;
+ }
+
+ DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
+ {
+ instanceCount = instanceCount_;
+ return *this;
+ }
+
+ DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
+ {
+ firstIndex = firstIndex_;
+ return *this;
+ }
+
+ DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
+ {
+ vertexOffset = vertexOffset_;
+ return *this;
+ }
+
+ DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
+ {
+ firstInstance = firstInstance_;
+ return *this;
+ }
+
+ operator const VkDrawIndexedIndirectCommand&() const
+ {
+ return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
+ }
+
+ bool operator==( DrawIndexedIndirectCommand const& rhs ) const
+ {
+ return ( indexCount == rhs.indexCount )
+ && ( instanceCount == rhs.instanceCount )
+ && ( firstIndex == rhs.firstIndex )
+ && ( vertexOffset == rhs.vertexOffset )
+ && ( firstInstance == rhs.firstInstance );
+ }
+
+ bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t indexCount;
+ uint32_t instanceCount;
+ uint32_t firstIndex;
+ int32_t vertexOffset;
+ uint32_t firstInstance;
+ };
+ static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+
+ struct DispatchIndirectCommand
+ {
+ DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ , z( z_ )
+ {
+ }
+
+ DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
+ }
+
+ DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
+ return *this;
+ }
+ DispatchIndirectCommand& setX( uint32_t x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ DispatchIndirectCommand& setY( uint32_t y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ DispatchIndirectCommand& setZ( uint32_t z_ )
+ {
+ z = z_;
+ return *this;
+ }
+
+ operator const VkDispatchIndirectCommand&() const
+ {
+ return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
+ }
+
+ bool operator==( DispatchIndirectCommand const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( z == rhs.z );
+ }
+
+ bool operator!=( DispatchIndirectCommand const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t x;
+ uint32_t y;
+ uint32_t z;
+ };
+ static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
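+
+ // Editor's note (illustrative sketch, not part of the upstream header): DrawIndirectCommand,
+ // DrawIndexedIndirectCommand and DispatchIndirectCommand mirror the records that
+ // vkCmdDrawIndirect, vkCmdDrawIndexedIndirect and vkCmdDispatchIndirect read from a GPU
+ // buffer, so they are convenient for filling that buffer from the CPU, e.g.:
+ //
+ //   vk::DrawIndirectCommand cmd( 3 /*vertexCount*/, 1 /*instanceCount*/, 0, 0 );
+ //   memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );   // 'mappedIndirectBuffer' is hypothetical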
+
+ struct DisplayPlanePropertiesKHR
+ {
+ operator const VkDisplayPlanePropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
+ }
+
+ bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
+ {
+ return ( currentDisplay == rhs.currentDisplay )
+ && ( currentStackIndex == rhs.currentStackIndex );
+ }
+
+ bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DisplayKHR currentDisplay;
+ uint32_t currentStackIndex;
+ };
+ static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+
+ struct DisplayModeParametersKHR
+ {
+ DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 )
+ : visibleRegion( visibleRegion_ )
+ , refreshRate( refreshRate_ )
+ {
+ }
+
+ DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+ }
+
+ DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+ return *this;
+ }
+ DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
+ {
+ visibleRegion = visibleRegion_;
+ return *this;
+ }
+
+ DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
+ {
+ refreshRate = refreshRate_;
+ return *this;
+ }
+
+ operator const VkDisplayModeParametersKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
+ }
+
+ bool operator==( DisplayModeParametersKHR const& rhs ) const
+ {
+ return ( visibleRegion == rhs.visibleRegion )
+ && ( refreshRate == rhs.refreshRate );
+ }
+
+ bool operator!=( DisplayModeParametersKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Extent2D visibleRegion;
+ uint32_t refreshRate;
+ };
+ static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+
+ struct DisplayModePropertiesKHR
+ {
+ operator const VkDisplayModePropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
+ }
+
+ bool operator==( DisplayModePropertiesKHR const& rhs ) const
+ {
+ return ( displayMode == rhs.displayMode )
+ && ( parameters == rhs.parameters );
+ }
+
+ bool operator!=( DisplayModePropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DisplayModeKHR displayMode;
+ DisplayModeParametersKHR parameters;
+ };
+ static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+
+ struct RectLayerKHR
+ {
+ RectLayerKHR( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D(), uint32_t layer_ = 0 )
+ : offset( offset_ )
+ , extent( extent_ )
+ , layer( layer_ )
+ {
+ }
+
+ RectLayerKHR( VkRectLayerKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RectLayerKHR ) );
+ }
+
+ RectLayerKHR& operator=( VkRectLayerKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RectLayerKHR ) );
+ return *this;
+ }
+ RectLayerKHR& setOffset( Offset2D offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ RectLayerKHR& setExtent( Extent2D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ RectLayerKHR& setLayer( uint32_t layer_ )
+ {
+ layer = layer_;
+ return *this;
+ }
+
+ operator const VkRectLayerKHR&() const
+ {
+ return *reinterpret_cast<const VkRectLayerKHR*>(this);
+ }
+
+ bool operator==( RectLayerKHR const& rhs ) const
+ {
+ return ( offset == rhs.offset )
+ && ( extent == rhs.extent )
+ && ( layer == rhs.layer );
+ }
+
+ bool operator!=( RectLayerKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Offset2D offset;
+ Extent2D extent;
+ uint32_t layer;
+ };
+ static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+
+ struct PresentRegionKHR
+ {
+ PresentRegionKHR( uint32_t rectangleCount_ = 0, const RectLayerKHR* pRectangles_ = nullptr )
+ : rectangleCount( rectangleCount_ )
+ , pRectangles( pRectangles_ )
+ {
+ }
+
+ PresentRegionKHR( VkPresentRegionKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
+ }
+
+ PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
+ return *this;
+ }
+ PresentRegionKHR& setRectangleCount( uint32_t rectangleCount_ )
+ {
+ rectangleCount = rectangleCount_;
+ return *this;
+ }
+
+ PresentRegionKHR& setPRectangles( const RectLayerKHR* pRectangles_ )
+ {
+ pRectangles = pRectangles_;
+ return *this;
+ }
+
+ operator const VkPresentRegionKHR&() const
+ {
+ return *reinterpret_cast<const VkPresentRegionKHR*>(this);
+ }
+
+ bool operator==( PresentRegionKHR const& rhs ) const
+ {
+ return ( rectangleCount == rhs.rectangleCount )
+ && ( pRectangles == rhs.pRectangles );
+ }
+
+ bool operator!=( PresentRegionKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t rectangleCount;
+ const RectLayerKHR* pRectangles;
+ };
+ static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+
+ struct XYColorEXT
+ {
+ XYColorEXT( float x_ = 0, float y_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ {
+ }
+
+ XYColorEXT( VkXYColorEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XYColorEXT ) );
+ }
+
+ XYColorEXT& operator=( VkXYColorEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XYColorEXT ) );
+ return *this;
+ }
+ XYColorEXT& setX( float x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ XYColorEXT& setY( float y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ operator const VkXYColorEXT&() const
+ {
+ return *reinterpret_cast<const VkXYColorEXT*>(this);
+ }
+
+ bool operator==( XYColorEXT const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y );
+ }
+
+ bool operator!=( XYColorEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ float x;
+ float y;
+ };
+ static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+
+ struct RefreshCycleDurationGOOGLE
+ {
+ RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = 0 )
+ : refreshDuration( refreshDuration_ )
+ {
+ }
+
+ RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+ }
+
+ RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+ return *this;
+ }
+ RefreshCycleDurationGOOGLE& setRefreshDuration( uint64_t refreshDuration_ )
+ {
+ refreshDuration = refreshDuration_;
+ return *this;
+ }
+
+ operator const VkRefreshCycleDurationGOOGLE&() const
+ {
+ return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>(this);
+ }
+
+ bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const
+ {
+ return ( refreshDuration == rhs.refreshDuration );
+ }
+
+ bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint64_t refreshDuration;
+ };
+ static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+
+ struct PastPresentationTimingGOOGLE
+ {
+ PastPresentationTimingGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0, uint64_t actualPresentTime_ = 0, uint64_t earliestPresentTime_ = 0, uint64_t presentMargin_ = 0 )
+ : presentID( presentID_ )
+ , desiredPresentTime( desiredPresentTime_ )
+ , actualPresentTime( actualPresentTime_ )
+ , earliestPresentTime( earliestPresentTime_ )
+ , presentMargin( presentMargin_ )
+ {
+ }
+
+ PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+ }
+
+ PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+ return *this;
+ }
+ PastPresentationTimingGOOGLE& setPresentID( uint32_t presentID_ )
+ {
+ presentID = presentID_;
+ return *this;
+ }
+
+ PastPresentationTimingGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ )
+ {
+ desiredPresentTime = desiredPresentTime_;
+ return *this;
+ }
+
+ PastPresentationTimingGOOGLE& setActualPresentTime( uint64_t actualPresentTime_ )
+ {
+ actualPresentTime = actualPresentTime_;
+ return *this;
+ }
+
+ PastPresentationTimingGOOGLE& setEarliestPresentTime( uint64_t earliestPresentTime_ )
+ {
+ earliestPresentTime = earliestPresentTime_;
+ return *this;
+ }
+
+ PastPresentationTimingGOOGLE& setPresentMargin( uint64_t presentMargin_ )
+ {
+ presentMargin = presentMargin_;
+ return *this;
+ }
+
+ operator const VkPastPresentationTimingGOOGLE&() const
+ {
+ return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>(this);
+ }
+
+ bool operator==( PastPresentationTimingGOOGLE const& rhs ) const
+ {
+ return ( presentID == rhs.presentID )
+ && ( desiredPresentTime == rhs.desiredPresentTime )
+ && ( actualPresentTime == rhs.actualPresentTime )
+ && ( earliestPresentTime == rhs.earliestPresentTime )
+ && ( presentMargin == rhs.presentMargin );
+ }
+
+ bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t presentID;
+ uint64_t desiredPresentTime;
+ uint64_t actualPresentTime;
+ uint64_t earliestPresentTime;
+ uint64_t presentMargin;
+ };
+ static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+
+ struct PresentTimeGOOGLE
+ {
+ PresentTimeGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0 )
+ : presentID( presentID_ )
+ , desiredPresentTime( desiredPresentTime_ )
+ {
+ }
+
+ PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
+ }
+
+ PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
+ return *this;
+ }
+ PresentTimeGOOGLE& setPresentID( uint32_t presentID_ )
+ {
+ presentID = presentID_;
+ return *this;
+ }
+
+ PresentTimeGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ )
+ {
+ desiredPresentTime = desiredPresentTime_;
+ return *this;
+ }
+
+ operator const VkPresentTimeGOOGLE&() const
+ {
+ return *reinterpret_cast<const VkPresentTimeGOOGLE*>(this);
+ }
+
+ bool operator==( PresentTimeGOOGLE const& rhs ) const
+ {
+ return ( presentID == rhs.presentID )
+ && ( desiredPresentTime == rhs.desiredPresentTime );
+ }
+
+ bool operator!=( PresentTimeGOOGLE const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t presentID;
+ uint64_t desiredPresentTime;
+ };
+ static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+
+ struct ViewportWScalingNV
+ {
+ ViewportWScalingNV( float xcoeff_ = 0, float ycoeff_ = 0 )
+ : xcoeff( xcoeff_ )
+ , ycoeff( ycoeff_ )
+ {
+ }
+
+ ViewportWScalingNV( VkViewportWScalingNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
+ }
+
+ ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
+ return *this;
+ }
+ ViewportWScalingNV& setXcoeff( float xcoeff_ )
+ {
+ xcoeff = xcoeff_;
+ return *this;
+ }
+
+ ViewportWScalingNV& setYcoeff( float ycoeff_ )
+ {
+ ycoeff = ycoeff_;
+ return *this;
+ }
+
+ operator const VkViewportWScalingNV&() const
+ {
+ return *reinterpret_cast<const VkViewportWScalingNV*>(this);
+ }
+
+ bool operator==( ViewportWScalingNV const& rhs ) const
+ {
+ return ( xcoeff == rhs.xcoeff )
+ && ( ycoeff == rhs.ycoeff );
+ }
+
+ bool operator!=( ViewportWScalingNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ float xcoeff;
+ float ycoeff;
+ };
+ static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+
+ struct SampleLocationEXT
+ {
+ SampleLocationEXT( float x_ = 0, float y_ = 0 )
+ : x( x_ )
+ , y( y_ )
+ {
+ }
+
+ SampleLocationEXT( VkSampleLocationEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SampleLocationEXT ) );
+ }
+
+ SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SampleLocationEXT ) );
+ return *this;
+ }
+ SampleLocationEXT& setX( float x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ SampleLocationEXT& setY( float y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ operator const VkSampleLocationEXT&() const
+ {
+ return *reinterpret_cast<const VkSampleLocationEXT*>(this);
+ }
+
+ bool operator==( SampleLocationEXT const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y );
+ }
+
+ bool operator!=( SampleLocationEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ float x;
+ float y;
+ };
+ static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+
+ struct ShaderResourceUsageAMD
+ {
+ operator const VkShaderResourceUsageAMD&() const
+ {
+ return *reinterpret_cast<const VkShaderResourceUsageAMD*>(this);
+ }
+
+ bool operator==( ShaderResourceUsageAMD const& rhs ) const
+ {
+ return ( numUsedVgprs == rhs.numUsedVgprs )
+ && ( numUsedSgprs == rhs.numUsedSgprs )
+ && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
+ && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
+ && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+ }
+
+ bool operator!=( ShaderResourceUsageAMD const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t numUsedVgprs;
+ uint32_t numUsedSgprs;
+ uint32_t ldsSizePerLocalWorkGroup;
+ size_t ldsUsageSizeInBytes;
+ size_t scratchMemUsageInBytes;
+ };
+ static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+
+ struct VertexInputBindingDivisorDescriptionEXT
+ {
+ VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0, uint32_t divisor_ = 0 )
+ : binding( binding_ )
+ , divisor( divisor_ )
+ {
+ }
+
+ VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
+ }
+
+ VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
+ return *this;
+ }
+ VertexInputBindingDivisorDescriptionEXT& setBinding( uint32_t binding_ )
+ {
+ binding = binding_;
+ return *this;
+ }
+
+ VertexInputBindingDivisorDescriptionEXT& setDivisor( uint32_t divisor_ )
+ {
+ divisor = divisor_;
+ return *this;
+ }
+
+ operator const VkVertexInputBindingDivisorDescriptionEXT&() const
+ {
+ return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>(this);
+ }
+
+ bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const
+ {
+ return ( binding == rhs.binding )
+ && ( divisor == rhs.divisor );
+ }
+
+ bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t binding;
+ uint32_t divisor;
+ };
+ static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
+
+ enum class ImageLayout
+ {
+ eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+ eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+ eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+ eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+ eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+ eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+ eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+ eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+ eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+ eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+ ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+ eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR
+ };
+
+ struct DescriptorImageInfo
+ {
+ DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined )
+ : sampler( sampler_ )
+ , imageView( imageView_ )
+ , imageLayout( imageLayout_ )
+ {
+ }
+
+ DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
+ }
+
+ DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
+ return *this;
+ }
+ DescriptorImageInfo& setSampler( Sampler sampler_ )
+ {
+ sampler = sampler_;
+ return *this;
+ }
+
+ DescriptorImageInfo& setImageView( ImageView imageView_ )
+ {
+ imageView = imageView_;
+ return *this;
+ }
+
+ DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
+ {
+ imageLayout = imageLayout_;
+ return *this;
+ }
+
+ operator const VkDescriptorImageInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
+ }
+
+ bool operator==( DescriptorImageInfo const& rhs ) const
+ {
+ return ( sampler == rhs.sampler )
+ && ( imageView == rhs.imageView )
+ && ( imageLayout == rhs.imageLayout );
+ }
+
+ bool operator!=( DescriptorImageInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Sampler sampler;
+ ImageView imageView;
+ ImageLayout imageLayout;
+ };
+ static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
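The wrapper structs in this header all follow the same generated pattern: a constructor with defaulted arguments, chainable set*() members that return *this, and an implicit conversion back to the corresponding C struct. A minimal usage sketch for DescriptorImageInfo, assuming vulkan.hpp is included and that the sampler and image-view handles were created elsewhere (the helper function itself is illustrative, not part of the header):

  #include <vulkan/vulkan.hpp>

  // Hypothetical helper: builds an image descriptor for a combined image sampler.
  vk::DescriptorImageInfo makeImageInfo( vk::Sampler sampler, vk::ImageView view )
  {
    vk::DescriptorImageInfo info;
    info.setSampler( sampler )
        .setImageView( view )
        .setImageLayout( vk::ImageLayout::eShaderReadOnlyOptimal );

    // The wrapper converts implicitly wherever a VkDescriptorImageInfo is expected.
    const VkDescriptorImageInfo & cInfo = info;
    (void) cInfo;
    return info;
  }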
+
+ struct AttachmentReference
+ {
+ AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined )
+ : attachment( attachment_ )
+ , layout( layout_ )
+ {
+ }
+
+ AttachmentReference( VkAttachmentReference const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentReference ) );
+ }
+
+ AttachmentReference& operator=( VkAttachmentReference const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentReference ) );
+ return *this;
+ }
+ AttachmentReference& setAttachment( uint32_t attachment_ )
+ {
+ attachment = attachment_;
+ return *this;
+ }
+
+ AttachmentReference& setLayout( ImageLayout layout_ )
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ operator const VkAttachmentReference&() const
+ {
+ return *reinterpret_cast<const VkAttachmentReference*>(this);
+ }
+
+ bool operator==( AttachmentReference const& rhs ) const
+ {
+ return ( attachment == rhs.attachment )
+ && ( layout == rhs.layout );
+ }
+
+ bool operator!=( AttachmentReference const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t attachment;
+ ImageLayout layout;
+ };
+ static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+
+ enum class AttachmentLoadOp
+ {
+ eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
+ eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
+ eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
+ };
+
+ enum class AttachmentStoreOp
+ {
+ eStore = VK_ATTACHMENT_STORE_OP_STORE,
+ eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
+ };
+
+ enum class ImageType
+ {
+ e1D = VK_IMAGE_TYPE_1D,
+ e2D = VK_IMAGE_TYPE_2D,
+ e3D = VK_IMAGE_TYPE_3D
+ };
+
+ enum class ImageTiling
+ {
+ eOptimal = VK_IMAGE_TILING_OPTIMAL,
+ eLinear = VK_IMAGE_TILING_LINEAR
+ };
+
+ enum class ImageViewType
+ {
+ e1D = VK_IMAGE_VIEW_TYPE_1D,
+ e2D = VK_IMAGE_VIEW_TYPE_2D,
+ e3D = VK_IMAGE_VIEW_TYPE_3D,
+ eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+ e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+ e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+ eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+ };
+
+ enum class CommandBufferLevel
+ {
+ ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+ eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+ };
+
+ enum class ComponentSwizzle
+ {
+ eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+ eZero = VK_COMPONENT_SWIZZLE_ZERO,
+ eOne = VK_COMPONENT_SWIZZLE_ONE,
+ eR = VK_COMPONENT_SWIZZLE_R,
+ eG = VK_COMPONENT_SWIZZLE_G,
+ eB = VK_COMPONENT_SWIZZLE_B,
+ eA = VK_COMPONENT_SWIZZLE_A
+ };
+
+ struct ComponentMapping
+ {
+ ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
+ : r( r_ )
+ , g( g_ )
+ , b( b_ )
+ , a( a_ )
+ {
+ }
+
+ ComponentMapping( VkComponentMapping const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ComponentMapping ) );
+ }
+
+ ComponentMapping& operator=( VkComponentMapping const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ComponentMapping ) );
+ return *this;
+ }
+ ComponentMapping& setR( ComponentSwizzle r_ )
+ {
+ r = r_;
+ return *this;
+ }
+
+ ComponentMapping& setG( ComponentSwizzle g_ )
+ {
+ g = g_;
+ return *this;
+ }
+
+ ComponentMapping& setB( ComponentSwizzle b_ )
+ {
+ b = b_;
+ return *this;
+ }
+
+ ComponentMapping& setA( ComponentSwizzle a_ )
+ {
+ a = a_;
+ return *this;
+ }
+
+ operator const VkComponentMapping&() const
+ {
+ return *reinterpret_cast<const VkComponentMapping*>(this);
+ }
+
+ bool operator==( ComponentMapping const& rhs ) const
+ {
+ return ( r == rhs.r )
+ && ( g == rhs.g )
+ && ( b == rhs.b )
+ && ( a == rhs.a );
+ }
+
+ bool operator!=( ComponentMapping const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ComponentSwizzle r;
+ ComponentSwizzle g;
+ ComponentSwizzle b;
+ ComponentSwizzle a;
+ };
+ static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+
+ enum class DescriptorType
+ {
+ eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+ eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+ eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+ eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+ eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+ eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+ eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+ eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+ eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+ eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+ eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
+ };
+
+ struct DescriptorPoolSize
+ {
+ DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 )
+ : type( type_ )
+ , descriptorCount( descriptorCount_ )
+ {
+ }
+
+ DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
+ }
+
+ DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
+ return *this;
+ }
+ DescriptorPoolSize& setType( DescriptorType type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ operator const VkDescriptorPoolSize&() const
+ {
+ return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
+ }
+
+ bool operator==( DescriptorPoolSize const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( descriptorCount == rhs.descriptorCount );
+ }
+
+ bool operator!=( DescriptorPoolSize const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DescriptorType type;
+ uint32_t descriptorCount;
+ };
+ static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
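A descriptor pool is sized from an array of DescriptorPoolSize entries, each pairing a descriptor type with the number of descriptors of that type the pool can supply. A minimal sketch of feeding such an array into a DescriptorPoolCreateInfo (the counts and maxSets value are arbitrary, and the helper is illustrative only):

  #include <array>
  #include <vulkan/vulkan.hpp>

  vk::DescriptorPoolCreateInfo makePoolInfo( std::array<vk::DescriptorPoolSize, 2> const & sizes )
  {
    // The caller keeps 'sizes' alive for as long as the create info is used,
    // e.g. { vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 8 ),
    //        vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 8 ) }.
    return vk::DescriptorPoolCreateInfo()
      .setMaxSets( 16 )
      .setPoolSizeCount( static_cast<uint32_t>( sizes.size() ) )
      .setPPoolSizes( sizes.data() );
  }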
+
+ struct DescriptorUpdateTemplateEntry
+ {
+ DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, size_t offset_ = 0, size_t stride_ = 0 )
+ : dstBinding( dstBinding_ )
+ , dstArrayElement( dstArrayElement_ )
+ , descriptorCount( descriptorCount_ )
+ , descriptorType( descriptorType_ )
+ , offset( offset_ )
+ , stride( stride_ )
+ {
+ }
+
+ DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
+ }
+
+ DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
+ return *this;
+ }
+ DescriptorUpdateTemplateEntry& setDstBinding( uint32_t dstBinding_ )
+ {
+ dstBinding = dstBinding_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateEntry& setDstArrayElement( uint32_t dstArrayElement_ )
+ {
+ dstArrayElement = dstArrayElement_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateEntry& setDescriptorCount( uint32_t descriptorCount_ )
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateEntry& setDescriptorType( DescriptorType descriptorType_ )
+ {
+ descriptorType = descriptorType_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateEntry& setOffset( size_t offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateEntry& setStride( size_t stride_ )
+ {
+ stride = stride_;
+ return *this;
+ }
+
+ operator const VkDescriptorUpdateTemplateEntry&() const
+ {
+ return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>(this);
+ }
+
+ bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const
+ {
+ return ( dstBinding == rhs.dstBinding )
+ && ( dstArrayElement == rhs.dstArrayElement )
+ && ( descriptorCount == rhs.descriptorCount )
+ && ( descriptorType == rhs.descriptorType )
+ && ( offset == rhs.offset )
+ && ( stride == rhs.stride );
+ }
+
+ bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t dstBinding;
+ uint32_t dstArrayElement;
+ uint32_t descriptorCount;
+ DescriptorType descriptorType;
+ size_t offset;
+ size_t stride;
+ };
+ static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+
+ using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
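Each DescriptorUpdateTemplateEntry tells vkUpdateDescriptorSetWithTemplate where, inside an application-provided block of host memory, to read the data for a run of descriptors: offset is the byte offset of the first element and stride the distance between consecutive ones. A sketch for a single combined-image-sampler descriptor read from a hypothetical host-side struct (HostDescriptorData is an assumed application type, not part of this header):

  #include <cstddef>
  #include <vulkan/vulkan.hpp>

  // Hypothetical application-side layout handed to the update-with-template call.
  struct HostDescriptorData
  {
    vk::DescriptorImageInfo image;
  };

  vk::DescriptorUpdateTemplateEntry makeEntry()
  {
    return vk::DescriptorUpdateTemplateEntry()
      .setDstBinding( 0 )
      .setDstArrayElement( 0 )
      .setDescriptorCount( 1 )
      .setDescriptorType( vk::DescriptorType::eCombinedImageSampler )
      .setOffset( offsetof( HostDescriptorData, image ) )
      .setStride( sizeof( HostDescriptorData ) );
  }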
+
+ enum class QueryType
+ {
+ eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+ ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+ eTimestamp = VK_QUERY_TYPE_TIMESTAMP
+ };
+
+ enum class BorderColor
+ {
+ eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+ eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+ eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+ eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+ eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+ eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
+ };
+
+ enum class PipelineBindPoint
+ {
+ eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+ eCompute = VK_PIPELINE_BIND_POINT_COMPUTE
+ };
+
+ enum class PipelineCacheHeaderVersion
+ {
+ eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+ };
+
+ enum class PrimitiveTopology
+ {
+ ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+ eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+ eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+ eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+ eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+ eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+ eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+ eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+ eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+ eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+ ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+ };
+
+ enum class SharingMode
+ {
+ eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+ eConcurrent = VK_SHARING_MODE_CONCURRENT
+ };
+
+ enum class IndexType
+ {
+ eUint16 = VK_INDEX_TYPE_UINT16,
+ eUint32 = VK_INDEX_TYPE_UINT32
+ };
+
+ enum class Filter
+ {
+ eNearest = VK_FILTER_NEAREST,
+ eLinear = VK_FILTER_LINEAR,
+ eCubicIMG = VK_FILTER_CUBIC_IMG
+ };
+
+ enum class SamplerMipmapMode
+ {
+ eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+ eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+ };
+
+ enum class SamplerAddressMode
+ {
+ eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+ eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+ eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+ eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+ eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
+ };
+
+ enum class CompareOp
+ {
+ eNever = VK_COMPARE_OP_NEVER,
+ eLess = VK_COMPARE_OP_LESS,
+ eEqual = VK_COMPARE_OP_EQUAL,
+ eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+ eGreater = VK_COMPARE_OP_GREATER,
+ eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+ eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+ eAlways = VK_COMPARE_OP_ALWAYS
+ };
+
+ enum class PolygonMode
+ {
+ eFill = VK_POLYGON_MODE_FILL,
+ eLine = VK_POLYGON_MODE_LINE,
+ ePoint = VK_POLYGON_MODE_POINT,
+ eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+ };
+
+ enum class CullModeFlagBits
+ {
+ eNone = VK_CULL_MODE_NONE,
+ eFront = VK_CULL_MODE_FRONT_BIT,
+ eBack = VK_CULL_MODE_BACK_BIT,
+ eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+ };
+
+ using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+ VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
+ {
+ return CullModeFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
+ {
+ return ~( CullModeFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CullModeFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+ };
+ };
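CullModeFlags is the type-safe bitmask form of CullModeFlagBits: single bits combine through the operator| defined above, and the resulting mask can be ANDed and tested without falling back to raw integers. A small illustrative sketch (the function names are not part of the header):

  #include <vulkan/vulkan.hpp>

  // True when the cull mode includes front-face culling.
  bool cullsFront( vk::CullModeFlags mode )
  {
    return static_cast<bool>( mode & vk::CullModeFlagBits::eFront );
  }

  int main()
  {
    vk::CullModeFlags both = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
    return cullsFront( both ) ? 0 : 1;   // returns 0: eFront is set
  }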
+
+ enum class FrontFace
+ {
+ eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+ eClockwise = VK_FRONT_FACE_CLOCKWISE
+ };
+
+ enum class BlendFactor
+ {
+ eZero = VK_BLEND_FACTOR_ZERO,
+ eOne = VK_BLEND_FACTOR_ONE,
+ eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+ eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+ eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+ eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+ eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+ eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+ eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+ eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+ eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+ eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+ eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+ eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+ eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+ eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+ eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+ eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+ eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+ };
+
+ enum class BlendOp
+ {
+ eAdd = VK_BLEND_OP_ADD,
+ eSubtract = VK_BLEND_OP_SUBTRACT,
+ eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+ eMin = VK_BLEND_OP_MIN,
+ eMax = VK_BLEND_OP_MAX,
+ eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+ eSrcEXT = VK_BLEND_OP_SRC_EXT,
+ eDstEXT = VK_BLEND_OP_DST_EXT,
+ eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+ eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+ eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+ eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+ eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+ eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+ eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+ eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+ eXorEXT = VK_BLEND_OP_XOR_EXT,
+ eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+ eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+ eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+ eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+ eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+ eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+ eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+ eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+ eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+ eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+ eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+ eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+ eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+ eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+ eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+ eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+ eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+ ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+ eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+ eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+ eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+ eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+ eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+ ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+ ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+ ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+ ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+ eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+ eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+ eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+ eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+ eRedEXT = VK_BLEND_OP_RED_EXT,
+ eGreenEXT = VK_BLEND_OP_GREEN_EXT,
+ eBlueEXT = VK_BLEND_OP_BLUE_EXT
+ };
+
+ enum class StencilOp
+ {
+ eKeep = VK_STENCIL_OP_KEEP,
+ eZero = VK_STENCIL_OP_ZERO,
+ eReplace = VK_STENCIL_OP_REPLACE,
+ eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+ eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+ eInvert = VK_STENCIL_OP_INVERT,
+ eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+ eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+ };
+
+ struct StencilOpState
+ {
+ StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 )
+ : failOp( failOp_ )
+ , passOp( passOp_ )
+ , depthFailOp( depthFailOp_ )
+ , compareOp( compareOp_ )
+ , compareMask( compareMask_ )
+ , writeMask( writeMask_ )
+ , reference( reference_ )
+ {
+ }
+
+ StencilOpState( VkStencilOpState const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( StencilOpState ) );
+ }
+
+ StencilOpState& operator=( VkStencilOpState const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( StencilOpState ) );
+ return *this;
+ }
+ StencilOpState& setFailOp( StencilOp failOp_ )
+ {
+ failOp = failOp_;
+ return *this;
+ }
+
+ StencilOpState& setPassOp( StencilOp passOp_ )
+ {
+ passOp = passOp_;
+ return *this;
+ }
+
+ StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
+ {
+ depthFailOp = depthFailOp_;
+ return *this;
+ }
+
+ StencilOpState& setCompareOp( CompareOp compareOp_ )
+ {
+ compareOp = compareOp_;
+ return *this;
+ }
+
+ StencilOpState& setCompareMask( uint32_t compareMask_ )
+ {
+ compareMask = compareMask_;
+ return *this;
+ }
+
+ StencilOpState& setWriteMask( uint32_t writeMask_ )
+ {
+ writeMask = writeMask_;
+ return *this;
+ }
+
+ StencilOpState& setReference( uint32_t reference_ )
+ {
+ reference = reference_;
+ return *this;
+ }
+
+ operator const VkStencilOpState&() const
+ {
+ return *reinterpret_cast<const VkStencilOpState*>(this);
+ }
+
+ bool operator==( StencilOpState const& rhs ) const
+ {
+ return ( failOp == rhs.failOp )
+ && ( passOp == rhs.passOp )
+ && ( depthFailOp == rhs.depthFailOp )
+ && ( compareOp == rhs.compareOp )
+ && ( compareMask == rhs.compareMask )
+ && ( writeMask == rhs.writeMask )
+ && ( reference == rhs.reference );
+ }
+
+ bool operator!=( StencilOpState const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ StencilOp failOp;
+ StencilOp passOp;
+ StencilOp depthFailOp;
+ CompareOp compareOp;
+ uint32_t compareMask;
+ uint32_t writeMask;
+ uint32_t reference;
+ };
+ static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+
+ enum class LogicOp
+ {
+ eClear = VK_LOGIC_OP_CLEAR,
+ eAnd = VK_LOGIC_OP_AND,
+ eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+ eCopy = VK_LOGIC_OP_COPY,
+ eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+ eNoOp = VK_LOGIC_OP_NO_OP,
+ eXor = VK_LOGIC_OP_XOR,
+ eOr = VK_LOGIC_OP_OR,
+ eNor = VK_LOGIC_OP_NOR,
+ eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+ eInvert = VK_LOGIC_OP_INVERT,
+ eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+ eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+ eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+ eNand = VK_LOGIC_OP_NAND,
+ eSet = VK_LOGIC_OP_SET
+ };
+
+ enum class InternalAllocationType
+ {
+ eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+ };
+
+ enum class SystemAllocationScope
+ {
+ eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+ eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+ eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+ eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+ eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+ };
+
+ enum class PhysicalDeviceType
+ {
+ eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+ eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+ eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+ eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+ eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+ };
+
+ enum class VertexInputRate
+ {
+ eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+ eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+ };
+
+ struct VertexInputBindingDescription
+ {
+ VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex )
+ : binding( binding_ )
+ , stride( stride_ )
+ , inputRate( inputRate_ )
+ {
+ }
+
+ VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
+ }
+
+ VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
+ return *this;
+ }
+ VertexInputBindingDescription& setBinding( uint32_t binding_ )
+ {
+ binding = binding_;
+ return *this;
+ }
+
+ VertexInputBindingDescription& setStride( uint32_t stride_ )
+ {
+ stride = stride_;
+ return *this;
+ }
+
+ VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
+ {
+ inputRate = inputRate_;
+ return *this;
+ }
+
+ operator const VkVertexInputBindingDescription&() const
+ {
+ return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
+ }
+
+ bool operator==( VertexInputBindingDescription const& rhs ) const
+ {
+ return ( binding == rhs.binding )
+ && ( stride == rhs.stride )
+ && ( inputRate == rhs.inputRate );
+ }
+
+ bool operator!=( VertexInputBindingDescription const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t binding;
+ uint32_t stride;
+ VertexInputRate inputRate;
+ };
+ static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+
+ enum class Format
+ {
+ eUndefined = VK_FORMAT_UNDEFINED,
+ eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+ eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+ eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+ eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+ eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+ eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+ eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+ eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+ eR8Unorm = VK_FORMAT_R8_UNORM,
+ eR8Snorm = VK_FORMAT_R8_SNORM,
+ eR8Uscaled = VK_FORMAT_R8_USCALED,
+ eR8Sscaled = VK_FORMAT_R8_SSCALED,
+ eR8Uint = VK_FORMAT_R8_UINT,
+ eR8Sint = VK_FORMAT_R8_SINT,
+ eR8Srgb = VK_FORMAT_R8_SRGB,
+ eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+ eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+ eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+ eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+ eR8G8Uint = VK_FORMAT_R8G8_UINT,
+ eR8G8Sint = VK_FORMAT_R8G8_SINT,
+ eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+ eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+ eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+ eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+ eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+ eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+ eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+ eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+ eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+ eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+ eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+ eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+ eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+ eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+ eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+ eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+ eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+ eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+ eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+ eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+ eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+ eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+ eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+ eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+ eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+ eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+ eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+ eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+ eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+ eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+ eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+ eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+ eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+ eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+ eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+ eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+ eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+ eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+ eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+ eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+ eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+ eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+ eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+ eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+ eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+ eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+ eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+ eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+ eR16Unorm = VK_FORMAT_R16_UNORM,
+ eR16Snorm = VK_FORMAT_R16_SNORM,
+ eR16Uscaled = VK_FORMAT_R16_USCALED,
+ eR16Sscaled = VK_FORMAT_R16_SSCALED,
+ eR16Uint = VK_FORMAT_R16_UINT,
+ eR16Sint = VK_FORMAT_R16_SINT,
+ eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+ eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+ eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+ eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+ eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+ eR16G16Uint = VK_FORMAT_R16G16_UINT,
+ eR16G16Sint = VK_FORMAT_R16G16_SINT,
+ eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+ eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+ eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+ eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+ eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+ eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+ eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+ eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+ eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+ eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+ eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+ eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+ eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+ eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+ eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+ eR32Uint = VK_FORMAT_R32_UINT,
+ eR32Sint = VK_FORMAT_R32_SINT,
+ eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+ eR32G32Uint = VK_FORMAT_R32G32_UINT,
+ eR32G32Sint = VK_FORMAT_R32G32_SINT,
+ eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+ eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+ eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+ eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+ eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+ eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+ eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+ eR64Uint = VK_FORMAT_R64_UINT,
+ eR64Sint = VK_FORMAT_R64_SINT,
+ eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+ eR64G64Uint = VK_FORMAT_R64G64_UINT,
+ eR64G64Sint = VK_FORMAT_R64G64_SINT,
+ eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+ eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+ eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+ eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+ eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+ eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+ eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+ eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+ eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+ eD16Unorm = VK_FORMAT_D16_UNORM,
+ eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+ eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+ eS8Uint = VK_FORMAT_S8_UINT,
+ eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+ eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+ eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+ eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+ eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+ eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+ eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+ eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+ eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+ eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+ eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+ eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+ eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+ eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+ eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+ eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+ eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+ eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+ eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+ eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+ eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+ eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+ eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+ eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+ eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+ eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+ eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+ eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+ eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+ eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+ eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+ eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+ eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+ eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+ eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+ eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+ eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+ eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+ eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+ eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+ eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+ eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+ eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+ eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+ eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+ eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+ eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+ eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+ eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+ eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+ eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+ eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+ eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+ eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+ eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+ eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+ eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+ eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+ eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM,
+ eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+ eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM,
+ eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+ eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+ eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+ eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+ eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+ eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+ eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+ eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+ eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+ eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+ eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+ eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16,
+ eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+ eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+ eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+ eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+ eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+ eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+ eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+ eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+ eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+ eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+ eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16,
+ eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+ eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+ eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+ eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+ eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+ eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+ eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+ eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+ eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+ eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+ eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM,
+ eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+ eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM,
+ eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+ eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+ eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+ eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+ eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+ eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+ eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+ eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+ eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+ eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+ ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+ ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+ ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+ ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+ ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+ ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+ ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+ ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
+ };
+
+ struct VertexInputAttributeDescription
+ {
+ VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 )
+ : location( location_ )
+ , binding( binding_ )
+ , format( format_ )
+ , offset( offset_ )
+ {
+ }
+
+ VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
+ }
+
+ VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
+ return *this;
+ }
+ VertexInputAttributeDescription& setLocation( uint32_t location_ )
+ {
+ location = location_;
+ return *this;
+ }
+
+ VertexInputAttributeDescription& setBinding( uint32_t binding_ )
+ {
+ binding = binding_;
+ return *this;
+ }
+
+ VertexInputAttributeDescription& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ VertexInputAttributeDescription& setOffset( uint32_t offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ operator const VkVertexInputAttributeDescription&() const
+ {
+ return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
+ }
+
+ bool operator==( VertexInputAttributeDescription const& rhs ) const
+ {
+ return ( location == rhs.location )
+ && ( binding == rhs.binding )
+ && ( format == rhs.format )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( VertexInputAttributeDescription const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t location;
+ uint32_t binding;
+ Format format;
+ uint32_t offset;
+ };
+ static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
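A binding description and its attribute descriptions are consumed together by PipelineVertexInputStateCreateInfo when a graphics pipeline is created. A brief sketch, assuming a hypothetical interleaved Vertex struct with a two-component position and a four-component color (the struct and the wiring below are illustrative, not part of this header):

  #include <array>
  #include <cstddef>
  #include <vulkan/vulkan.hpp>

  struct Vertex
  {
    float position[2];
    float color[4];
  };

  void describeVertexInput()
  {
    // One buffer binding advancing per vertex, carrying both attributes.
    vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );

    std::array<vk::VertexInputAttributeDescription, 2> attributes = {
      vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32Sfloat, offsetof( Vertex, position ) ),
      vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32A32Sfloat, offsetof( Vertex, color ) )
    };

    // 'binding' and 'attributes' must stay alive while this create info is in use.
    vk::PipelineVertexInputStateCreateInfo vertexInput;
    vertexInput.setVertexBindingDescriptionCount( 1 )
               .setPVertexBindingDescriptions( &binding )
               .setVertexAttributeDescriptionCount( static_cast<uint32_t>( attributes.size() ) )
               .setPVertexAttributeDescriptions( attributes.data() );
    (void) vertexInput;
  }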
+
+ enum class StructureType
+ {
+ eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+ eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+ eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+ eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+ eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+ eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+ eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+ eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+ eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+ eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+ eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+ eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+ ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+ ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+ ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+ ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+ ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+ ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+ ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+ ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+ ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+ ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+ ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+ eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+ eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+ eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+ eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+ eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+ eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+ eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+ eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+ eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+ eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+ eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+ eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+ eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+ eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+ eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+ eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+ ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+ eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+ eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+ eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+ eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+ ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+ ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+ eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+ eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+ eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+ eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+ eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+ eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+ eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+ eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+ eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+ eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+ eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+ eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+ eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+ eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+ eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+ eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+ ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+ ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+ eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+ eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+ eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+ eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+ eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+ eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+ eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+ eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+ eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+ eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+ eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+ eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+ ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+ ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+ ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+ ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+ eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+ eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+ eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+ eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+ ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+ ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+ eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+ eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+ ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+ ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+ eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+ eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+ ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+ ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+ ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+ ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+ eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+ eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+ eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+ eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+ ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+ ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+ eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+ eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+ ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+ ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+ ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+ ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+ ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+ ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+ eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+ ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+ ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+ eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+ eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+ eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+ eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+ eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+ eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+ eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+ eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+ eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+ ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+ ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+ eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+ eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+ eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+ eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+ ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+ ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+ eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+ eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+ ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+ ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+ eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+ eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+ ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+ ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+ eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+ eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+ eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+ eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+ eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+ eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+ ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+ ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+ eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+ eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+ eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+ eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+ eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+ eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+ ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+ ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+ eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+ eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+ ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+ ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+ eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+ eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+ ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+ eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+ ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+ eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+ eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+ eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+ eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+ eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+ eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+ eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+ eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+ eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+ eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+ eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+ eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+ eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
+ eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+ eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+ eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+ ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+ eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+ eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+ eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+ eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+ eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+ eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+ eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+ eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+ eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+ eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+ eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+ eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+ eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+ eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+ eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+ eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+ eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+ eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+ eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+ eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+ eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+ eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+ eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+ eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+ eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+ eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+ eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+ eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+ ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+ ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+ eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+ eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+ eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+ eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+ eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+ eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
+ ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+ eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+ eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+ eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+ eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+ eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+ ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+ ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+ ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+ ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+ ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+ eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+ eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+ eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+ eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+ eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+ eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+ eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+ ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+ eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+ eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+ eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+ eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+ eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+ eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
+ eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+ eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
+ eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+ eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+ eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+ eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+ eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+ eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+ eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+ ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+ eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+ eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+ eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+ ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+ eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+ eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+ ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+ ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+ ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+ ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+ ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+ eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+ eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+ eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+ ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+ ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+ eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+ eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+ eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+ eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+ eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+ ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+ ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+ ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+ ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT
+ };
+
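Note on the enum above: the KHR-suffixed enumerators (eExternalBufferPropertiesKHR, ePhysicalDeviceIdPropertiesKHR, and so on) are source-compatibility aliases for extensions that were promoted to core in Vulkan 1.1; each is initialized with the same VK_STRUCTURE_TYPE_* value as its core counterpart. A minimal illustration (my own example, not part of the header):

#include <vulkan/vulkan.hpp>

// The promoted-extension alias and the core enumerator are the same value,
// so code written against the original KHR extension keeps compiling.
static_assert( vk::StructureType::eExternalBufferPropertiesKHR ==
               vk::StructureType::eExternalBufferProperties,
               "alias and core enumerator should be identical" );
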
+ struct ApplicationInfo
+ {
+ ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 )
+ : pApplicationName( pApplicationName_ )
+ , applicationVersion( applicationVersion_ )
+ , pEngineName( pEngineName_ )
+ , engineVersion( engineVersion_ )
+ , apiVersion( apiVersion_ )
+ {
+ }
+
+ ApplicationInfo( VkApplicationInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ApplicationInfo ) );
+ }
+
+ ApplicationInfo& operator=( VkApplicationInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ApplicationInfo ) );
+ return *this;
+ }
+ ApplicationInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
+ {
+ pApplicationName = pApplicationName_;
+ return *this;
+ }
+
+ ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
+ {
+ applicationVersion = applicationVersion_;
+ return *this;
+ }
+
+ ApplicationInfo& setPEngineName( const char* pEngineName_ )
+ {
+ pEngineName = pEngineName_;
+ return *this;
+ }
+
+ ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
+ {
+ engineVersion = engineVersion_;
+ return *this;
+ }
+
+ ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
+ {
+ apiVersion = apiVersion_;
+ return *this;
+ }
+
+ operator const VkApplicationInfo&() const
+ {
+ return *reinterpret_cast<const VkApplicationInfo*>(this);
+ }
+
+ bool operator==( ApplicationInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pApplicationName == rhs.pApplicationName )
+ && ( applicationVersion == rhs.applicationVersion )
+ && ( pEngineName == rhs.pEngineName )
+ && ( engineVersion == rhs.engineVersion )
+ && ( apiVersion == rhs.apiVersion );
+ }
+
+ bool operator!=( ApplicationInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eApplicationInfo;
+
+ public:
+ const void* pNext = nullptr;
+ const char* pApplicationName;
+ uint32_t applicationVersion;
+ const char* pEngineName;
+ uint32_t engineVersion;
+ uint32_t apiVersion;
+ };
+ static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+
+ struct InstanceCreateInfo
+ {
+ InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr )
+ : flags( flags_ )
+ , pApplicationInfo( pApplicationInfo_ )
+ , enabledLayerCount( enabledLayerCount_ )
+ , ppEnabledLayerNames( ppEnabledLayerNames_ )
+ , enabledExtensionCount( enabledExtensionCount_ )
+ , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+ {
+ }
+
+ InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+ }
+
+ InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+ return *this;
+ }
+ InstanceCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
+ {
+ pApplicationInfo = pApplicationInfo_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
+ {
+ enabledLayerCount = enabledLayerCount_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
+ {
+ ppEnabledLayerNames = ppEnabledLayerNames_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
+ {
+ enabledExtensionCount = enabledExtensionCount_;
+ return *this;
+ }
+
+ InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
+ {
+ ppEnabledExtensionNames = ppEnabledExtensionNames_;
+ return *this;
+ }
+
+ operator const VkInstanceCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
+ }
+
+ bool operator==( InstanceCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pApplicationInfo == rhs.pApplicationInfo )
+ && ( enabledLayerCount == rhs.enabledLayerCount )
+ && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+ && ( enabledExtensionCount == rhs.enabledExtensionCount )
+ && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+ }
+
+ bool operator!=( InstanceCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eInstanceCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ InstanceCreateFlags flags;
+ const ApplicationInfo* pApplicationInfo;
+ uint32_t enabledLayerCount;
+ const char* const* ppEnabledLayerNames;
+ uint32_t enabledExtensionCount;
+ const char* const* ppEnabledExtensionNames;
+ };
+ static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
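As a usage sketch (example only, not part of the header): sType and pNext are pre-initialized, the setters chain, and the implicit conversion operator hands the wrapper straight to the C entry point. The application and engine names below are placeholders.

#include <vulkan/vulkan.hpp>
#include <stdexcept>

VkInstance createExampleInstance()
{
    // Chained setters fill in everything except sType/pNext, which the
    // wrapper already defaulted.
    vk::ApplicationInfo appInfo = vk::ApplicationInfo()
        .setPApplicationName( "example-app" )      // placeholder name
        .setApplicationVersion( 1 )
        .setPEngineName( "example-engine" )        // placeholder name
        .setEngineVersion( 1 )
        .setApiVersion( VK_API_VERSION_1_1 );

    vk::InstanceCreateInfo createInfo = vk::InstanceCreateInfo()
        .setPApplicationInfo( &appInfo );

    // The conversion operator yields a reference to the equivalent C struct,
    // so the wrapper can be passed to the plain C API unchanged.
    const VkInstanceCreateInfo& cCreateInfo = createInfo;

    VkInstance instance = VK_NULL_HANDLE;
    if ( vkCreateInstance( &cCreateInfo, nullptr, &instance ) != VK_SUCCESS )
    {
        throw std::runtime_error( "vkCreateInstance failed" );
    }
    return instance;
}

The header also provides vk::createInstance and member functions on the handle classes that wrap these C calls; the raw call is used here only to show the conversion operator in action.
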
+
+ struct MemoryAllocateInfo
+ {
+ MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 )
+ : allocationSize( allocationSize_ )
+ , memoryTypeIndex( memoryTypeIndex_ )
+ {
+ }
+
+ MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
+ }
+
+ MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
+ return *this;
+ }
+ MemoryAllocateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
+ {
+ allocationSize = allocationSize_;
+ return *this;
+ }
+
+ MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
+ {
+ memoryTypeIndex = memoryTypeIndex_;
+ return *this;
+ }
+
+ operator const VkMemoryAllocateInfo&() const
+ {
+ return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
+ }
+
+ bool operator==( MemoryAllocateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( allocationSize == rhs.allocationSize )
+ && ( memoryTypeIndex == rhs.memoryTypeIndex );
+ }
+
+ bool operator!=( MemoryAllocateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryAllocateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceSize allocationSize;
+ uint32_t memoryTypeIndex;
+ };
+ static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+
+ struct MappedMemoryRange
+ {
+ MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
+ : memory( memory_ )
+ , offset( offset_ )
+ , size( size_ )
+ {
+ }
+
+ MappedMemoryRange( VkMappedMemoryRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
+ }
+
+ MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
+ return *this;
+ }
+ MappedMemoryRange& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MappedMemoryRange& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ MappedMemoryRange& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ MappedMemoryRange& setSize( DeviceSize size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ operator const VkMappedMemoryRange&() const
+ {
+ return *reinterpret_cast<const VkMappedMemoryRange*>(this);
+ }
+
+ bool operator==( MappedMemoryRange const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( MappedMemoryRange const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMappedMemoryRange;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceMemory memory;
+ DeviceSize offset;
+ DeviceSize size;
+ };
+ static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
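A sketch of how MemoryAllocateInfo and MappedMemoryRange are typically used together with the C entry points (example only; the device handle, memory-type index, and error handling are left to the caller):

#include <vulkan/vulkan.hpp>
#include <cstring>

// Allocates host-visible memory, writes some bytes, and flushes the range.
void allocateWriteFlush( VkDevice device, uint32_t hostVisibleMemoryTypeIndex,
                         const void* src, VkDeviceSize size )
{
    vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
        .setAllocationSize( size )
        .setMemoryTypeIndex( hostVisibleMemoryTypeIndex );

    VkDeviceMemory memory = VK_NULL_HANDLE;
    const VkMemoryAllocateInfo& cAllocInfo = allocInfo;
    vkAllocateMemory( device, &cAllocInfo, nullptr, &memory );

    void* mapped = nullptr;
    vkMapMemory( device, memory, 0, size, 0, &mapped );
    std::memcpy( mapped, src, static_cast<size_t>( size ) );

    // The flush is only required for non-coherent memory types.
    vk::MappedMemoryRange range = vk::MappedMemoryRange()
        .setMemory( vk::DeviceMemory( memory ) )
        .setOffset( 0 )
        .setSize( VK_WHOLE_SIZE );
    const VkMappedMemoryRange& cRange = range;
    vkFlushMappedMemoryRanges( device, 1, &cRange );

    vkUnmapMemory( device, memory );
}
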
+
+ struct WriteDescriptorSet
+ {
+ WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr )
+ : dstSet( dstSet_ )
+ , dstBinding( dstBinding_ )
+ , dstArrayElement( dstArrayElement_ )
+ , descriptorCount( descriptorCount_ )
+ , descriptorType( descriptorType_ )
+ , pImageInfo( pImageInfo_ )
+ , pBufferInfo( pBufferInfo_ )
+ , pTexelBufferView( pTexelBufferView_ )
+ {
+ }
+
+ WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
+ }
+
+ WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
+ return *this;
+ }
+ WriteDescriptorSet& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
+ {
+ dstSet = dstSet_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
+ {
+ dstBinding = dstBinding_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
+ {
+ dstArrayElement = dstArrayElement_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
+ {
+ descriptorType = descriptorType_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
+ {
+ pImageInfo = pImageInfo_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
+ {
+ pBufferInfo = pBufferInfo_;
+ return *this;
+ }
+
+ WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
+ {
+ pTexelBufferView = pTexelBufferView_;
+ return *this;
+ }
+
+ operator const VkWriteDescriptorSet&() const
+ {
+ return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
+ }
+
+ bool operator==( WriteDescriptorSet const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( dstSet == rhs.dstSet )
+ && ( dstBinding == rhs.dstBinding )
+ && ( dstArrayElement == rhs.dstArrayElement )
+ && ( descriptorCount == rhs.descriptorCount )
+ && ( descriptorType == rhs.descriptorType )
+ && ( pImageInfo == rhs.pImageInfo )
+ && ( pBufferInfo == rhs.pBufferInfo )
+ && ( pTexelBufferView == rhs.pTexelBufferView );
+ }
+
+ bool operator!=( WriteDescriptorSet const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eWriteDescriptorSet;
+
+ public:
+ const void* pNext = nullptr;
+ DescriptorSet dstSet;
+ uint32_t dstBinding;
+ uint32_t dstArrayElement;
+ uint32_t descriptorCount;
+ DescriptorType descriptorType;
+ const DescriptorImageInfo* pImageInfo;
+ const DescriptorBufferInfo* pBufferInfo;
+ const BufferView* pTexelBufferView;
+ };
+ static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
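A sketch of updating a single uniform-buffer descriptor with this wrapper (example only; it assumes the DescriptorBufferInfo wrapper declared earlier in this header and valid buffer/set handles from the caller):

#include <vulkan/vulkan.hpp>

// Points binding 0 of 'set' at 'buffer'. descriptorCount is 1 because a
// single DescriptorBufferInfo is supplied.
void bindUniformBuffer( VkDevice device, vk::DescriptorSet set,
                        vk::Buffer buffer, vk::DeviceSize range )
{
    vk::DescriptorBufferInfo bufferInfo( buffer, 0, range );

    vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
        .setDstSet( set )
        .setDstBinding( 0 )
        .setDstArrayElement( 0 )
        .setDescriptorCount( 1 )
        .setDescriptorType( vk::DescriptorType::eUniformBuffer )
        .setPBufferInfo( &bufferInfo );

    const VkWriteDescriptorSet& cWrite = write;
    vkUpdateDescriptorSets( device, 1, &cWrite, 0, nullptr );
}
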
+
+ struct CopyDescriptorSet
+ {
+ CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 )
+ : srcSet( srcSet_ )
+ , srcBinding( srcBinding_ )
+ , srcArrayElement( srcArrayElement_ )
+ , dstSet( dstSet_ )
+ , dstBinding( dstBinding_ )
+ , dstArrayElement( dstArrayElement_ )
+ , descriptorCount( descriptorCount_ )
+ {
+ }
+
+ CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
+ }
+
+ CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
+ return *this;
+ }
+ CopyDescriptorSet& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
+ {
+ srcSet = srcSet_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
+ {
+ srcBinding = srcBinding_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
+ {
+ srcArrayElement = srcArrayElement_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
+ {
+ dstSet = dstSet_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
+ {
+ dstBinding = dstBinding_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
+ {
+ dstArrayElement = dstArrayElement_;
+ return *this;
+ }
+
+ CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ operator const VkCopyDescriptorSet&() const
+ {
+ return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
+ }
+
+ bool operator==( CopyDescriptorSet const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcSet == rhs.srcSet )
+ && ( srcBinding == rhs.srcBinding )
+ && ( srcArrayElement == rhs.srcArrayElement )
+ && ( dstSet == rhs.dstSet )
+ && ( dstBinding == rhs.dstBinding )
+ && ( dstArrayElement == rhs.dstArrayElement )
+ && ( descriptorCount == rhs.descriptorCount );
+ }
+
+ bool operator!=( CopyDescriptorSet const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCopyDescriptorSet;
+
+ public:
+ const void* pNext = nullptr;
+ DescriptorSet srcSet;
+ uint32_t srcBinding;
+ uint32_t srcArrayElement;
+ DescriptorSet dstSet;
+ uint32_t dstBinding;
+ uint32_t dstArrayElement;
+ uint32_t descriptorCount;
+ };
+ static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+
+ struct BufferViewCreateInfo
+ {
+ BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 )
+ : flags( flags_ )
+ , buffer( buffer_ )
+ , format( format_ )
+ , offset( offset_ )
+ , range( range_ )
+ {
+ }
+
+ BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
+ }
+
+ BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
+ return *this;
+ }
+ BufferViewCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ BufferViewCreateInfo& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ BufferViewCreateInfo& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ BufferViewCreateInfo& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ BufferViewCreateInfo& setRange( DeviceSize range_ )
+ {
+ range = range_;
+ return *this;
+ }
+
+ operator const VkBufferViewCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
+ }
+
+ bool operator==( BufferViewCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( buffer == rhs.buffer )
+ && ( format == rhs.format )
+ && ( offset == rhs.offset )
+ && ( range == rhs.range );
+ }
+
+ bool operator!=( BufferViewCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBufferViewCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ BufferViewCreateFlags flags;
+ Buffer buffer;
+ Format format;
+ DeviceSize offset;
+ DeviceSize range;
+ };
+ static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+
+ struct ShaderModuleCreateInfo
+ {
+ ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr )
+ : flags( flags_ )
+ , codeSize( codeSize_ )
+ , pCode( pCode_ )
+ {
+ }
+
+ ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
+ }
+
+ ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
+ return *this;
+ }
+ ShaderModuleCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
+ {
+ codeSize = codeSize_;
+ return *this;
+ }
+
+ ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
+ {
+ pCode = pCode_;
+ return *this;
+ }
+
+ operator const VkShaderModuleCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
+ }
+
+ bool operator==( ShaderModuleCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( codeSize == rhs.codeSize )
+ && ( pCode == rhs.pCode );
+ }
+
+ bool operator!=( ShaderModuleCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eShaderModuleCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ShaderModuleCreateFlags flags;
+ size_t codeSize;
+ const uint32_t* pCode;
+ };
+ static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
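One detail worth calling out: codeSize is a byte count, while pCode points at 32-bit SPIR-V words. A sketch (example only) of creating a shader module from a vector of words:

#include <vulkan/vulkan.hpp>
#include <vector>
#include <cstdint>

// 'spirv' holds the module as 32-bit words; codeSize must be given in bytes.
VkShaderModule createShaderModule( VkDevice device,
                                   const std::vector<uint32_t>& spirv )
{
    vk::ShaderModuleCreateInfo createInfo = vk::ShaderModuleCreateInfo()
        .setCodeSize( spirv.size() * sizeof( uint32_t ) )  // bytes, not words
        .setPCode( spirv.data() );

    const VkShaderModuleCreateInfo& cCreateInfo = createInfo;
    VkShaderModule shaderModule = VK_NULL_HANDLE;
    vkCreateShaderModule( device, &cCreateInfo, nullptr, &shaderModule );
    return shaderModule;
}
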
+
+ struct DescriptorSetAllocateInfo
+ {
+ DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr )
+ : descriptorPool( descriptorPool_ )
+ , descriptorSetCount( descriptorSetCount_ )
+ , pSetLayouts( pSetLayouts_ )
+ {
+ }
+
+ DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
+ }
+
+ DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
+ return *this;
+ }
+ DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
+ {
+ descriptorPool = descriptorPool_;
+ return *this;
+ }
+
+ DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
+ {
+ descriptorSetCount = descriptorSetCount_;
+ return *this;
+ }
+
+ DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
+ {
+ pSetLayouts = pSetLayouts_;
+ return *this;
+ }
+
+ operator const VkDescriptorSetAllocateInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
+ }
+
+ bool operator==( DescriptorSetAllocateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( descriptorPool == rhs.descriptorPool )
+ && ( descriptorSetCount == rhs.descriptorSetCount )
+ && ( pSetLayouts == rhs.pSetLayouts );
+ }
+
+ bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DescriptorPool descriptorPool;
+ uint32_t descriptorSetCount;
+ const DescriptorSetLayout* pSetLayouts;
+ };
+ static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
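Usage sketch (example only): allocating one descriptor set per layout from an existing pool; descriptorSetCount must match the number of layouts supplied.

#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<VkDescriptorSet> allocateSets( VkDevice device, vk::DescriptorPool pool,
                                           const std::vector<vk::DescriptorSetLayout>& layouts )
{
    vk::DescriptorSetAllocateInfo allocInfo = vk::DescriptorSetAllocateInfo()
        .setDescriptorPool( pool )
        .setDescriptorSetCount( static_cast<uint32_t>( layouts.size() ) )
        .setPSetLayouts( layouts.data() );

    // One output handle per requested set.
    std::vector<VkDescriptorSet> sets( layouts.size() );
    const VkDescriptorSetAllocateInfo& cAllocInfo = allocInfo;
    vkAllocateDescriptorSets( device, &cAllocInfo, sets.data() );
    return sets;
}
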
+
+ struct PipelineVertexInputStateCreateInfo
+ {
+ PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
+ : flags( flags_ )
+ , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
+ , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
+ , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
+ , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+ {
+ }
+
+ PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
+ }
+
+ PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
+ return *this;
+ }
+ PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
+ {
+ vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
+ {
+ pVertexBindingDescriptions = pVertexBindingDescriptions_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
+ {
+ vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+ return *this;
+ }
+
+ PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
+ {
+ pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+ return *this;
+ }
+
+ operator const VkPipelineVertexInputStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+ && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+ && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+ && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+ }
+
+ bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineVertexInputStateCreateFlags flags;
+ uint32_t vertexBindingDescriptionCount;
+ const VertexInputBindingDescription* pVertexBindingDescriptions;
+ uint32_t vertexAttributeDescriptionCount;
+ const VertexInputAttributeDescription* pVertexAttributeDescriptions;
+ };
+ static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+
+ struct PipelineInputAssemblyStateCreateInfo
+ {
+ PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 )
+ : flags( flags_ )
+ , topology( topology_ )
+ , primitiveRestartEnable( primitiveRestartEnable_ )
+ {
+ }
+
+ PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
+ }
+
+ PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
+ return *this;
+ }
+ PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
+ {
+ topology = topology_;
+ return *this;
+ }
+
+ PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
+ {
+ primitiveRestartEnable = primitiveRestartEnable_;
+ return *this;
+ }
+
+ operator const VkPipelineInputAssemblyStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( topology == rhs.topology )
+ && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+ }
+
+ bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineInputAssemblyStateCreateFlags flags;
+ PrimitiveTopology topology;
+ Bool32 primitiveRestartEnable;
+ };
+ static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+
+ struct PipelineTessellationStateCreateInfo
+ {
+ PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 )
+ : flags( flags_ )
+ , patchControlPoints( patchControlPoints_ )
+ {
+ }
+
+ PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+ }
+
+ PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+ return *this;
+ }
+ PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
+ {
+ patchControlPoints = patchControlPoints_;
+ return *this;
+ }
+
+ operator const VkPipelineTessellationStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( patchControlPoints == rhs.patchControlPoints );
+ }
+
+ bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineTessellationStateCreateFlags flags;
+ uint32_t patchControlPoints;
+ };
+ static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+
+ struct PipelineViewportStateCreateInfo
+ {
+ PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr )
+ : flags( flags_ )
+ , viewportCount( viewportCount_ )
+ , pViewports( pViewports_ )
+ , scissorCount( scissorCount_ )
+ , pScissors( pScissors_ )
+ {
+ }
+
+ PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+ }
+
+ PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+ return *this;
+ }
+ PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
+ {
+ viewportCount = viewportCount_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
+ {
+ pViewports = pViewports_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
+ {
+ scissorCount = scissorCount_;
+ return *this;
+ }
+
+ PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
+ {
+ pScissors = pScissors_;
+ return *this;
+ }
+
+ operator const VkPipelineViewportStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( viewportCount == rhs.viewportCount )
+ && ( pViewports == rhs.pViewports )
+ && ( scissorCount == rhs.scissorCount )
+ && ( pScissors == rhs.pScissors );
+ }
+
+ bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineViewportStateCreateFlags flags;
+ uint32_t viewportCount;
+ const Viewport* pViewports;
+ uint32_t scissorCount;
+ const Rect2D* pScissors;
+ };
+ static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+
+ struct PipelineRasterizationStateCreateInfo
+ {
+ PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 )
+ : flags( flags_ )
+ , depthClampEnable( depthClampEnable_ )
+ , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
+ , polygonMode( polygonMode_ )
+ , cullMode( cullMode_ )
+ , frontFace( frontFace_ )
+ , depthBiasEnable( depthBiasEnable_ )
+ , depthBiasConstantFactor( depthBiasConstantFactor_ )
+ , depthBiasClamp( depthBiasClamp_ )
+ , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+ , lineWidth( lineWidth_ )
+ {
+ }
+
+ PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
+ }
+
+ PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
+ return *this;
+ }
+ PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
+ {
+ depthClampEnable = depthClampEnable_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
+ {
+ rasterizerDiscardEnable = rasterizerDiscardEnable_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
+ {
+ polygonMode = polygonMode_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
+ {
+ cullMode = cullMode_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
+ {
+ frontFace = frontFace_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
+ {
+ depthBiasEnable = depthBiasEnable_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
+ {
+ depthBiasConstantFactor = depthBiasConstantFactor_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
+ {
+ depthBiasClamp = depthBiasClamp_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
+ {
+ depthBiasSlopeFactor = depthBiasSlopeFactor_;
+ return *this;
+ }
+
+ PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
+ {
+ lineWidth = lineWidth_;
+ return *this;
+ }
+
+ operator const VkPipelineRasterizationStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( depthClampEnable == rhs.depthClampEnable )
+ && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+ && ( polygonMode == rhs.polygonMode )
+ && ( cullMode == rhs.cullMode )
+ && ( frontFace == rhs.frontFace )
+ && ( depthBiasEnable == rhs.depthBiasEnable )
+ && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+ && ( depthBiasClamp == rhs.depthBiasClamp )
+ && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+ && ( lineWidth == rhs.lineWidth );
+ }
+
+ bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineRasterizationStateCreateFlags flags;
+ Bool32 depthClampEnable;
+ Bool32 rasterizerDiscardEnable;
+ PolygonMode polygonMode;
+ CullModeFlags cullMode;
+ FrontFace frontFace;
+ Bool32 depthBiasEnable;
+ float depthBiasConstantFactor;
+ float depthBiasClamp;
+ float depthBiasSlopeFactor;
+ float lineWidth;
+ };
+ static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
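Note that the constructor defaults above are all zero, including lineWidth, while the Vulkan spec requires lineWidth to be 1.0 unless the wideLines feature is enabled. A sketch of a conventional fill-mode state set up explicitly (example only; CullModeFlagBits::eBack is declared elsewhere in this header):

#include <vulkan/vulkan.hpp>

// Filled polygons, back-face culling, counter-clockwise front faces,
// no depth bias, and the mandatory lineWidth of 1.0.
vk::PipelineRasterizationStateCreateInfo makeDefaultRasterizationState()
{
    return vk::PipelineRasterizationStateCreateInfo()
        .setPolygonMode( vk::PolygonMode::eFill )
        .setCullMode( vk::CullModeFlagBits::eBack )
        .setFrontFace( vk::FrontFace::eCounterClockwise )
        .setLineWidth( 1.0f );  // must be 1.0 unless wideLines is enabled
}
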
+
+ struct PipelineDepthStencilStateCreateInfo
+ {
+ PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 )
+ : flags( flags_ )
+ , depthTestEnable( depthTestEnable_ )
+ , depthWriteEnable( depthWriteEnable_ )
+ , depthCompareOp( depthCompareOp_ )
+ , depthBoundsTestEnable( depthBoundsTestEnable_ )
+ , stencilTestEnable( stencilTestEnable_ )
+ , front( front_ )
+ , back( back_ )
+ , minDepthBounds( minDepthBounds_ )
+ , maxDepthBounds( maxDepthBounds_ )
+ {
+ }
+
+ PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
+ }
+
+ PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
+ return *this;
+ }
+ PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
+ {
+ depthTestEnable = depthTestEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
+ {
+ depthWriteEnable = depthWriteEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
+ {
+ depthCompareOp = depthCompareOp_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
+ {
+ depthBoundsTestEnable = depthBoundsTestEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
+ {
+ stencilTestEnable = stencilTestEnable_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
+ {
+ front = front_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
+ {
+ back = back_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
+ {
+ minDepthBounds = minDepthBounds_;
+ return *this;
+ }
+
+ PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
+ {
+ maxDepthBounds = maxDepthBounds_;
+ return *this;
+ }
+
+ operator const VkPipelineDepthStencilStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( depthTestEnable == rhs.depthTestEnable )
+ && ( depthWriteEnable == rhs.depthWriteEnable )
+ && ( depthCompareOp == rhs.depthCompareOp )
+ && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+ && ( stencilTestEnable == rhs.stencilTestEnable )
+ && ( front == rhs.front )
+ && ( back == rhs.back )
+ && ( minDepthBounds == rhs.minDepthBounds )
+ && ( maxDepthBounds == rhs.maxDepthBounds );
+ }
+
+ bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineDepthStencilStateCreateFlags flags;
+ Bool32 depthTestEnable;
+ Bool32 depthWriteEnable;
+ CompareOp depthCompareOp;
+ Bool32 depthBoundsTestEnable;
+ Bool32 stencilTestEnable;
+ StencilOpState front;
+ StencilOpState back;
+ float minDepthBounds;
+ float maxDepthBounds;
+ };
+ static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+
+ struct PipelineCacheCreateInfo
+ {
+ PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
+ : flags( flags_ )
+ , initialDataSize( initialDataSize_ )
+ , pInitialData( pInitialData_ )
+ {
+ }
+
+ PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
+ }
+
+ PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
+ return *this;
+ }
+ PipelineCacheCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
+ {
+ initialDataSize = initialDataSize_;
+ return *this;
+ }
+
+ PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
+ {
+ pInitialData = pInitialData_;
+ return *this;
+ }
+
+ operator const VkPipelineCacheCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineCacheCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( initialDataSize == rhs.initialDataSize )
+ && ( pInitialData == rhs.pInitialData );
+ }
+
+ bool operator!=( PipelineCacheCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineCacheCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineCacheCreateFlags flags;
+ size_t initialDataSize;
+ const void* pInitialData;
+ };
+ static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+
+ struct SamplerCreateInfo
+ {
+ SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 )
+ : flags( flags_ )
+ , magFilter( magFilter_ )
+ , minFilter( minFilter_ )
+ , mipmapMode( mipmapMode_ )
+ , addressModeU( addressModeU_ )
+ , addressModeV( addressModeV_ )
+ , addressModeW( addressModeW_ )
+ , mipLodBias( mipLodBias_ )
+ , anisotropyEnable( anisotropyEnable_ )
+ , maxAnisotropy( maxAnisotropy_ )
+ , compareEnable( compareEnable_ )
+ , compareOp( compareOp_ )
+ , minLod( minLod_ )
+ , maxLod( maxLod_ )
+ , borderColor( borderColor_ )
+ , unnormalizedCoordinates( unnormalizedCoordinates_ )
+ {
+ }
+
+ SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
+ }
+
+ SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
+ return *this;
+ }
+ SamplerCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMagFilter( Filter magFilter_ )
+ {
+ magFilter = magFilter_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMinFilter( Filter minFilter_ )
+ {
+ minFilter = minFilter_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
+ {
+ mipmapMode = mipmapMode_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
+ {
+ addressModeU = addressModeU_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
+ {
+ addressModeV = addressModeV_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
+ {
+ addressModeW = addressModeW_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
+ {
+ mipLodBias = mipLodBias_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
+ {
+ anisotropyEnable = anisotropyEnable_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
+ {
+ maxAnisotropy = maxAnisotropy_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
+ {
+ compareEnable = compareEnable_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
+ {
+ compareOp = compareOp_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMinLod( float minLod_ )
+ {
+ minLod = minLod_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setMaxLod( float maxLod_ )
+ {
+ maxLod = maxLod_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
+ {
+ borderColor = borderColor_;
+ return *this;
+ }
+
+ SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
+ {
+ unnormalizedCoordinates = unnormalizedCoordinates_;
+ return *this;
+ }
+
+ operator const VkSamplerCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
+ }
+
+ bool operator==( SamplerCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( magFilter == rhs.magFilter )
+ && ( minFilter == rhs.minFilter )
+ && ( mipmapMode == rhs.mipmapMode )
+ && ( addressModeU == rhs.addressModeU )
+ && ( addressModeV == rhs.addressModeV )
+ && ( addressModeW == rhs.addressModeW )
+ && ( mipLodBias == rhs.mipLodBias )
+ && ( anisotropyEnable == rhs.anisotropyEnable )
+ && ( maxAnisotropy == rhs.maxAnisotropy )
+ && ( compareEnable == rhs.compareEnable )
+ && ( compareOp == rhs.compareOp )
+ && ( minLod == rhs.minLod )
+ && ( maxLod == rhs.maxLod )
+ && ( borderColor == rhs.borderColor )
+ && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+ }
+
+ bool operator!=( SamplerCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSamplerCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ SamplerCreateFlags flags;
+ Filter magFilter;
+ Filter minFilter;
+ SamplerMipmapMode mipmapMode;
+ SamplerAddressMode addressModeU;
+ SamplerAddressMode addressModeV;
+ SamplerAddressMode addressModeW;
+ float mipLodBias;
+ Bool32 anisotropyEnable;
+ float maxAnisotropy;
+ Bool32 compareEnable;
+ CompareOp compareOp;
+ float minLod;
+ float maxLod;
+ BorderColor borderColor;
+ Bool32 unnormalizedCoordinates;
+ };
+ static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
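Usage sketch (example only; Filter::eLinear and SamplerMipmapMode::eLinear are declared elsewhere in this header): a basic trilinear, repeat-addressed sampler, which also shows that anisotropy and depth compare stay disabled by default.

#include <vulkan/vulkan.hpp>

VkSampler createTrilinearSampler( VkDevice device )
{
    vk::SamplerCreateInfo createInfo = vk::SamplerCreateInfo()
        .setMagFilter( vk::Filter::eLinear )
        .setMinFilter( vk::Filter::eLinear )
        .setMipmapMode( vk::SamplerMipmapMode::eLinear )
        .setAddressModeU( vk::SamplerAddressMode::eRepeat )
        .setAddressModeV( vk::SamplerAddressMode::eRepeat )
        .setAddressModeW( vk::SamplerAddressMode::eRepeat )
        .setMaxLod( VK_LOD_CLAMP_NONE );   // do not clamp the mip range

    const VkSamplerCreateInfo& cCreateInfo = createInfo;
    VkSampler sampler = VK_NULL_HANDLE;
    vkCreateSampler( device, &cCreateInfo, nullptr, &sampler );
    return sampler;
}
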
+
+ struct CommandBufferAllocateInfo
+ {
+ CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 )
+ : commandPool( commandPool_ )
+ , level( level_ )
+ , commandBufferCount( commandBufferCount_ )
+ {
+ }
+
+ CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
+ }
+
+ CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
+ return *this;
+ }
+ CommandBufferAllocateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
+ {
+ commandPool = commandPool_;
+ return *this;
+ }
+
+ CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
+ {
+ level = level_;
+ return *this;
+ }
+
+ CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+ {
+ commandBufferCount = commandBufferCount_;
+ return *this;
+ }
+
+ operator const VkCommandBufferAllocateInfo&() const
+ {
+ return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
+ }
+
+ bool operator==( CommandBufferAllocateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( commandPool == rhs.commandPool )
+ && ( level == rhs.level )
+ && ( commandBufferCount == rhs.commandBufferCount );
+ }
+
+ bool operator!=( CommandBufferAllocateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCommandBufferAllocateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ CommandPool commandPool;
+ CommandBufferLevel level;
+ uint32_t commandBufferCount;
+ };
+ static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
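// [Editor's note] Usage sketch (editorial aside, not part of the patch). Assumes a
// valid vk::Device `device` and vk::CommandPool `commandPool`; the vector returned by
// allocateCommandBuffers contains exactly commandBufferCount command buffers.
vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );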
+
+ struct RenderPassBeginInfo
+ {
+ RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr )
+ : renderPass( renderPass_ )
+ , framebuffer( framebuffer_ )
+ , renderArea( renderArea_ )
+ , clearValueCount( clearValueCount_ )
+ , pClearValues( pClearValues_ )
+ {
+ }
+
+ RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
+ }
+
+ RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
+ return *this;
+ }
+ RenderPassBeginInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
+ {
+ renderPass = renderPass_;
+ return *this;
+ }
+
+ RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
+ {
+ framebuffer = framebuffer_;
+ return *this;
+ }
+
+ RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
+ {
+ renderArea = renderArea_;
+ return *this;
+ }
+
+ RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
+ {
+ clearValueCount = clearValueCount_;
+ return *this;
+ }
+
+ RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
+ {
+ pClearValues = pClearValues_;
+ return *this;
+ }
+
+ operator const VkRenderPassBeginInfo&() const
+ {
+ return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
+ }
+
+ bool operator==( RenderPassBeginInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( renderPass == rhs.renderPass )
+ && ( framebuffer == rhs.framebuffer )
+ && ( renderArea == rhs.renderArea )
+ && ( clearValueCount == rhs.clearValueCount )
+ && ( pClearValues == rhs.pClearValues );
+ }
+
+ bool operator!=( RenderPassBeginInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eRenderPassBeginInfo;
+
+ public:
+ const void* pNext = nullptr;
+ RenderPass renderPass;
+ Framebuffer framebuffer;
+ Rect2D renderArea;
+ uint32_t clearValueCount;
+ const ClearValue* pClearValues;
+ };
+ static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
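// [Editor's note] Usage sketch (editorial aside, not part of the patch). Assumes a
// vk::CommandBuffer `cmd` already in the recording state, plus `renderPass`,
// `framebuffer` and a vk::Extent2D `extent` created elsewhere.
vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ), 1, &clearColor );
cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
// ... record draw commands for the first subpass ...
cmd.endRenderPass();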
+
+ struct EventCreateInfo
+ {
+ EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
+ : flags( flags_ )
+ {
+ }
+
+ EventCreateInfo( VkEventCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( EventCreateInfo ) );
+ }
+
+ EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( EventCreateInfo ) );
+ return *this;
+ }
+ EventCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ EventCreateInfo& setFlags( EventCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkEventCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkEventCreateInfo*>(this);
+ }
+
+ bool operator==( EventCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( EventCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eEventCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ EventCreateFlags flags;
+ };
+ static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+
+ struct SemaphoreCreateInfo
+ {
+ SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
+ : flags( flags_ )
+ {
+ }
+
+ SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
+ }
+
+ SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
+ return *this;
+ }
+ SemaphoreCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkSemaphoreCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
+ }
+
+ bool operator==( SemaphoreCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( SemaphoreCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSemaphoreCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ SemaphoreCreateFlags flags;
+ };
+ static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
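// [Editor's note] Usage sketch (editorial aside, not part of the patch). The struct
// currently carries no meaningful flags, so a default-constructed info is the common
// case; assumes a valid vk::Device `device`.
vk::Semaphore imageAvailable = device.createSemaphore( vk::SemaphoreCreateInfo() );
// RAII variant, available unless VULKAN_HPP_NO_SMART_HANDLE is defined:
vk::UniqueSemaphore renderFinished = device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );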
+
+ struct FramebufferCreateInfo
+ {
+ FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 )
+ : flags( flags_ )
+ , renderPass( renderPass_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , width( width_ )
+ , height( height_ )
+ , layers( layers_ )
+ {
+ }
+
+ FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
+ }
+
+ FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
+ return *this;
+ }
+ FramebufferCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
+ {
+ renderPass = renderPass_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+ {
+ attachmentCount = attachmentCount_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
+ {
+ pAttachments = pAttachments_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setWidth( uint32_t width_ )
+ {
+ width = width_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setHeight( uint32_t height_ )
+ {
+ height = height_;
+ return *this;
+ }
+
+ FramebufferCreateInfo& setLayers( uint32_t layers_ )
+ {
+ layers = layers_;
+ return *this;
+ }
+
+ operator const VkFramebufferCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
+ }
+
+ bool operator==( FramebufferCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( renderPass == rhs.renderPass )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( layers == rhs.layers );
+ }
+
+ bool operator!=( FramebufferCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eFramebufferCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ FramebufferCreateFlags flags;
+ RenderPass renderPass;
+ uint32_t attachmentCount;
+ const ImageView* pAttachments;
+ uint32_t width;
+ uint32_t height;
+ uint32_t layers;
+ };
+ static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
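// [Editor's note] Usage sketch (editorial aside, not part of the patch). Assumes
// `device`, `renderPass`, a single colour attachment view `imageView` (vk::ImageView)
// and the swapchain extent `extent`; the attachment list must match the render pass.
vk::FramebufferCreateInfo fbInfo = vk::FramebufferCreateInfo()
  .setRenderPass( renderPass )
  .setAttachmentCount( 1 )
  .setPAttachments( &imageView )
  .setWidth( extent.width )
  .setHeight( extent.height )
  .setLayers( 1 );
vk::Framebuffer framebuffer = device.createFramebuffer( fbInfo );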
+
+ struct DisplayModeCreateInfoKHR
+ {
+ DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
+ : flags( flags_ )
+ , parameters( parameters_ )
+ {
+ }
+
+ DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
+ }
+
+ DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
+ return *this;
+ }
+ DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
+ {
+ parameters = parameters_;
+ return *this;
+ }
+
+ operator const VkDisplayModeCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
+ }
+
+ bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( parameters == rhs.parameters );
+ }
+
+ bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ DisplayModeCreateFlagsKHR flags;
+ DisplayModeParametersKHR parameters;
+ };
+ static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+
+ struct DisplayPresentInfoKHR
+ {
+ DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 )
+ : srcRect( srcRect_ )
+ , dstRect( dstRect_ )
+ , persistent( persistent_ )
+ {
+ }
+
+ DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
+ }
+
+ DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
+ return *this;
+ }
+ DisplayPresentInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
+ {
+ srcRect = srcRect_;
+ return *this;
+ }
+
+ DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
+ {
+ dstRect = dstRect_;
+ return *this;
+ }
+
+ DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
+ {
+ persistent = persistent_;
+ return *this;
+ }
+
+ operator const VkDisplayPresentInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
+ }
+
+ bool operator==( DisplayPresentInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcRect == rhs.srcRect )
+ && ( dstRect == rhs.dstRect )
+ && ( persistent == rhs.persistent );
+ }
+
+ bool operator!=( DisplayPresentInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDisplayPresentInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Rect2D srcRect;
+ Rect2D dstRect;
+ Bool32 persistent;
+ };
+ static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct AndroidSurfaceCreateInfoKHR
+ {
+ AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), struct ANativeWindow* window_ = nullptr )
+ : flags( flags_ )
+ , window( window_ )
+ {
+ }
+
+ AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+ }
+
+ AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ AndroidSurfaceCreateInfoKHR& setWindow( struct ANativeWindow* window_ )
+ {
+ window = window_;
+ return *this;
+ }
+
+ operator const VkAndroidSurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( window == rhs.window );
+ }
+
+ bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ AndroidSurfaceCreateFlagsKHR flags;
+ struct ANativeWindow* window;
+ };
+ static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ struct MirSurfaceCreateInfoKHR
+ {
+ MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr )
+ : flags( flags_ )
+ , connection( connection_ )
+ , mirSurface( mirSurface_ )
+ {
+ }
+
+ MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) );
+ }
+
+ MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ )
+ {
+ connection = connection_;
+ return *this;
+ }
+
+ MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ )
+ {
+ mirSurface = mirSurface_;
+ return *this;
+ }
+
+ operator const VkMirSurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( connection == rhs.connection )
+ && ( mirSurface == rhs.mirSurface );
+ }
+
+ bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMirSurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ MirSurfaceCreateFlagsKHR flags;
+ MirConnection* connection;
+ MirSurface* mirSurface;
+ };
+ static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ struct ViSurfaceCreateInfoNN
+ {
+ ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), void* window_ = nullptr )
+ : flags( flags_ )
+ , window( window_ )
+ {
+ }
+
+ ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
+ }
+
+ ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
+ return *this;
+ }
+ ViSurfaceCreateInfoNN& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ViSurfaceCreateInfoNN& setWindow( void* window_ )
+ {
+ window = window_;
+ return *this;
+ }
+
+ operator const VkViSurfaceCreateInfoNN&() const
+ {
+ return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>(this);
+ }
+
+ bool operator==( ViSurfaceCreateInfoNN const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( window == rhs.window );
+ }
+
+ bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+
+ public:
+ const void* pNext = nullptr;
+ ViSurfaceCreateFlagsNN flags;
+ void* window;
+ };
+ static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ struct WaylandSurfaceCreateInfoKHR
+ {
+ WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr )
+ : flags( flags_ )
+ , display( display_ )
+ , surface( surface_ )
+ {
+ }
+
+ WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
+ }
+
+ WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
+ {
+ display = display_;
+ return *this;
+ }
+
+ WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
+ {
+ surface = surface_;
+ return *this;
+ }
+
+ operator const VkWaylandSurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( display == rhs.display )
+ && ( surface == rhs.surface );
+ }
+
+ bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ WaylandSurfaceCreateFlagsKHR flags;
+ struct wl_display* display;
+ struct wl_surface* surface;
+ };
+ static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct Win32SurfaceCreateInfoKHR
+ {
+ Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 )
+ : flags( flags_ )
+ , hinstance( hinstance_ )
+ , hwnd( hwnd_ )
+ {
+ }
+
+ Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
+ }
+
+ Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
+ {
+ hinstance = hinstance_;
+ return *this;
+ }
+
+ Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
+ {
+ hwnd = hwnd_;
+ return *this;
+ }
+
+ operator const VkWin32SurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( hinstance == rhs.hinstance )
+ && ( hwnd == rhs.hwnd );
+ }
+
+ bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Win32SurfaceCreateFlagsKHR flags;
+ HINSTANCE hinstance;
+ HWND hwnd;
+ };
+ static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
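// [Editor's note] Usage sketch for VK_USE_PLATFORM_WIN32_KHR builds (editorial aside,
// not part of the patch). Assumes a vk::Instance `instance` created with the
// VK_KHR_surface and VK_KHR_win32_surface extensions and a native HINSTANCE/HWND pair.
vk::Win32SurfaceCreateInfoKHR surfaceInfo( vk::Win32SurfaceCreateFlagsKHR(), hInstance, hwnd );
vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );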
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ struct XlibSurfaceCreateInfoKHR
+ {
+ XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 )
+ : flags( flags_ )
+ , dpy( dpy_ )
+ , window( window_ )
+ {
+ }
+
+ XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
+ }
+
+ XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
+ {
+ dpy = dpy_;
+ return *this;
+ }
+
+ XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
+ {
+ window = window_;
+ return *this;
+ }
+
+ operator const VkXlibSurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( dpy == rhs.dpy )
+ && ( window == rhs.window );
+ }
+
+ bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ XlibSurfaceCreateFlagsKHR flags;
+ Display* dpy;
+ Window window;
+ };
+ static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ struct XcbSurfaceCreateInfoKHR
+ {
+ XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 )
+ : flags( flags_ )
+ , connection( connection_ )
+ , window( window_ )
+ {
+ }
+
+ XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
+ }
+
+ XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
+ {
+ connection = connection_;
+ return *this;
+ }
+
+ XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
+ {
+ window = window_;
+ return *this;
+ }
+
+ operator const VkXcbSurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( connection == rhs.connection )
+ && ( window == rhs.window );
+ }
+
+ bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ XcbSurfaceCreateFlagsKHR flags;
+ xcb_connection_t* connection;
+ xcb_window_t window;
+ };
+ static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+ struct DebugMarkerMarkerInfoEXT
+ {
+ DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
+ : pMarkerName( pMarkerName_ )
+ {
+ memcpy( &color, color_.data(), 4 * sizeof( float ) );
+ }
+
+ DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
+ }
+
+ DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
+ return *this;
+ }
+ DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
+ {
+ pMarkerName = pMarkerName_;
+ return *this;
+ }
+
+ DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
+ {
+ memcpy( &color, color_.data(), 4 * sizeof( float ) );
+ return *this;
+ }
+
+ operator const VkDebugMarkerMarkerInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
+ }
+
+ bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pMarkerName == rhs.pMarkerName )
+ && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+ }
+
+ bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ const char* pMarkerName;
+ float color[4];
+ };
+ static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+
+ struct DedicatedAllocationImageCreateInfoNV
+ {
+ DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
+ : dedicatedAllocation( dedicatedAllocation_ )
+ {
+ }
+
+ DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
+ }
+
+ DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
+ return *this;
+ }
+ DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
+ {
+ dedicatedAllocation = dedicatedAllocation_;
+ return *this;
+ }
+
+ operator const VkDedicatedAllocationImageCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
+ }
+
+ bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( dedicatedAllocation == rhs.dedicatedAllocation );
+ }
+
+ bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 dedicatedAllocation;
+ };
+ static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+ struct DedicatedAllocationBufferCreateInfoNV
+ {
+ DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
+ : dedicatedAllocation( dedicatedAllocation_ )
+ {
+ }
+
+ DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
+ }
+
+ DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
+ return *this;
+ }
+ DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
+ {
+ dedicatedAllocation = dedicatedAllocation_;
+ return *this;
+ }
+
+ operator const VkDedicatedAllocationBufferCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
+ }
+
+ bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( dedicatedAllocation == rhs.dedicatedAllocation );
+ }
+
+ bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 dedicatedAllocation;
+ };
+ static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+
+ struct DedicatedAllocationMemoryAllocateInfoNV
+ {
+ DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() )
+ : image( image_ )
+ , buffer( buffer_ )
+ {
+ }
+
+ DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
+ }
+
+ DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
+ return *this;
+ }
+ DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
+ }
+
+ bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image )
+ && ( buffer == rhs.buffer );
+ }
+
+ bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ Image image;
+ Buffer buffer;
+ };
+ static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
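// [Editor's note] Sketch of the pNext chaining this NV struct is meant for (editorial
// aside, not part of the patch). Assumes VK_NV_dedicated_allocation is enabled, that
// `image` was created with a DedicatedAllocationImageCreateInfoNV chained into its
// create info, and that `memoryTypeIndex` is picked from the requirements below.
vk::MemoryRequirements memRequirements = device.getImageMemoryRequirements( image );
vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedInfo( image, vk::Buffer() );
vk::MemoryAllocateInfo allocInfo( memRequirements.size, memoryTypeIndex );
allocInfo.setPNext( &dedicatedInfo );
vk::DeviceMemory memory = device.allocateMemory( allocInfo );
device.bindImageMemory( image, memory, 0 );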
+
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ struct ExportMemoryWin32HandleInfoNV
+ {
+ ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 )
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ {
+ }
+
+ ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
+ }
+
+ ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
+ return *this;
+ }
+ ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+ {
+ pAttributes = pAttributes_;
+ return *this;
+ }
+
+ ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
+ {
+ dwAccess = dwAccess_;
+ return *this;
+ }
+
+ operator const VkExportMemoryWin32HandleInfoNV&() const
+ {
+ return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
+ }
+
+ bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess );
+ }
+
+ bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ const SECURITY_ATTRIBUTES* pAttributes;
+ DWORD dwAccess;
+ };
+ static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ struct Win32KeyedMutexAcquireReleaseInfoNV
+ {
+ Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
+ : acquireCount( acquireCount_ )
+ , pAcquireSyncs( pAcquireSyncs_ )
+ , pAcquireKeys( pAcquireKeys_ )
+ , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+ , releaseCount( releaseCount_ )
+ , pReleaseSyncs( pReleaseSyncs_ )
+ , pReleaseKeys( pReleaseKeys_ )
+ {
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
+ return *this;
+ }
+ Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
+ {
+ acquireCount = acquireCount_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
+ {
+ pAcquireSyncs = pAcquireSyncs_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
+ {
+ pAcquireKeys = pAcquireKeys_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
+ {
+ pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
+ {
+ releaseCount = releaseCount_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
+ {
+ pReleaseSyncs = pReleaseSyncs_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
+ {
+ pReleaseKeys = pReleaseKeys_;
+ return *this;
+ }
+
+ operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const
+ {
+ return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
+ }
+
+ bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( acquireCount == rhs.acquireCount )
+ && ( pAcquireSyncs == rhs.pAcquireSyncs )
+ && ( pAcquireKeys == rhs.pAcquireKeys )
+ && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+ && ( releaseCount == rhs.releaseCount )
+ && ( pReleaseSyncs == rhs.pReleaseSyncs )
+ && ( pReleaseKeys == rhs.pReleaseKeys );
+ }
+
+ bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t acquireCount;
+ const DeviceMemory* pAcquireSyncs;
+ const uint64_t* pAcquireKeys;
+ const uint32_t* pAcquireTimeoutMilliseconds;
+ uint32_t releaseCount;
+ const DeviceMemory* pReleaseSyncs;
+ const uint64_t* pReleaseKeys;
+ };
+ static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+ struct DeviceGeneratedCommandsFeaturesNVX
+ {
+ DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
+ : computeBindingPointSupport( computeBindingPointSupport_ )
+ {
+ }
+
+ DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
+ }
+
+ DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
+ return *this;
+ }
+ DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
+ {
+ computeBindingPointSupport = computeBindingPointSupport_;
+ return *this;
+ }
+
+ operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
+ {
+ return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
+ }
+
+ bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+ }
+
+ bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 computeBindingPointSupport;
+ };
+ static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
+
+ struct DeviceGeneratedCommandsLimitsNVX
+ {
+ DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
+ : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+ , maxObjectEntryCounts( maxObjectEntryCounts_ )
+ , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+ , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+ , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+ {
+ }
+
+ DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
+ return *this;
+ }
+ DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
+ {
+ maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
+ {
+ maxObjectEntryCounts = maxObjectEntryCounts_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
+ {
+ minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
+ {
+ minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+ return *this;
+ }
+
+ DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
+ {
+ minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+ return *this;
+ }
+
+ operator const VkDeviceGeneratedCommandsLimitsNVX&() const
+ {
+ return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
+ }
+
+ bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
+ && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
+ && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
+ && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
+ && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
+ }
+
+ bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t maxIndirectCommandsLayoutTokenCount;
+ uint32_t maxObjectEntryCounts;
+ uint32_t minSequenceCountBufferOffsetAlignment;
+ uint32_t minSequenceIndexBufferOffsetAlignment;
+ uint32_t minCommandsTokenBufferOffsetAlignment;
+ };
+ static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
+
+ struct CmdReserveSpaceForCommandsInfoNVX
+ {
+ CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
+ : objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , maxSequencesCount( maxSequencesCount_ )
+ {
+ }
+
+ CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
+ }
+
+ CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
+ return *this;
+ }
+ CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
+ {
+ objectTable = objectTable_;
+ return *this;
+ }
+
+ CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
+ {
+ indirectCommandsLayout = indirectCommandsLayout_;
+ return *this;
+ }
+
+ CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+ {
+ maxSequencesCount = maxSequencesCount_;
+ return *this;
+ }
+
+ operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
+ {
+ return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
+ }
+
+ bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectTable == rhs.objectTable )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( maxSequencesCount == rhs.maxSequencesCount );
+ }
+
+ bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
+
+ public:
+ const void* pNext = nullptr;
+ ObjectTableNVX objectTable;
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ uint32_t maxSequencesCount;
+ };
+ static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceFeatures2
+ {
+ PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() )
+ : features( features_ )
+ {
+ }
+
+ PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) );
+ }
+
+ PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) );
+ return *this;
+ }
+ PhysicalDeviceFeatures2& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceFeatures2& setFeatures( PhysicalDeviceFeatures features_ )
+ {
+ features = features_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceFeatures2&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>(this);
+ }
+
+ bool operator==( PhysicalDeviceFeatures2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( features == rhs.features );
+ }
+
+ bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+
+ public:
+ void* pNext = nullptr;
+ PhysicalDeviceFeatures features;
+ };
+ static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
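// [Editor's note] Query sketch (editorial aside, not part of the patch). Assumes a
// valid vk::PhysicalDevice `physicalDevice` and Vulkan 1.1; with only the
// VK_KHR_get_physical_device_properties2 extension, call getFeatures2KHR instead.
// Extension feature structs (declared further down in this header) chain through pNext.
vk::PhysicalDeviceVariablePointerFeatures variablePointerFeatures;
vk::PhysicalDeviceFeatures2 features2;
features2.setPNext( &variablePointerFeatures );
physicalDevice.getFeatures2( &features2 );
bool hasAnisotropy      = features2.features.samplerAnisotropy == VK_TRUE;
bool hasVariablePointers = variablePointerFeatures.variablePointers == VK_TRUE;
// To enable what was found, chain the same features2 into vk::DeviceCreateInfo::pNext
// and leave DeviceCreateInfo::pEnabledFeatures as nullptr.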
+
+ struct PhysicalDevicePushDescriptorPropertiesKHR
+ {
+ PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 )
+ : maxPushDescriptors( maxPushDescriptors_ )
+ {
+ }
+
+ PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
+ }
+
+ PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
+ return *this;
+ }
+ PhysicalDevicePushDescriptorPropertiesKHR& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDevicePushDescriptorPropertiesKHR& setMaxPushDescriptors( uint32_t maxPushDescriptors_ )
+ {
+ maxPushDescriptors = maxPushDescriptors_;
+ return *this;
+ }
+
+ operator const VkPhysicalDevicePushDescriptorPropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>(this);
+ }
+
+ bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxPushDescriptors == rhs.maxPushDescriptors );
+ }
+
+ bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxPushDescriptors;
+ };
+ static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+
+ struct PresentRegionsKHR
+ {
+ PresentRegionsKHR( uint32_t swapchainCount_ = 0, const PresentRegionKHR* pRegions_ = nullptr )
+ : swapchainCount( swapchainCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ PresentRegionsKHR( VkPresentRegionsKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
+ }
+
+ PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
+ return *this;
+ }
+ PresentRegionsKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PresentRegionsKHR& setSwapchainCount( uint32_t swapchainCount_ )
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ PresentRegionsKHR& setPRegions( const PresentRegionKHR* pRegions_ )
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+ operator const VkPresentRegionsKHR&() const
+ {
+ return *reinterpret_cast<const VkPresentRegionsKHR*>(this);
+ }
+
+ bool operator==( PresentRegionsKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( PresentRegionsKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePresentRegionsKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t swapchainCount;
+ const PresentRegionKHR* pRegions;
+ };
+ static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
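// [Editor's note] Sketch of VK_KHR_incremental_present usage (editorial aside, not part
// of the patch). Assumes the extension is enabled and that `queue`, `swapchain`,
// `imageIndex` (uint32_t) and `renderFinished` (vk::Semaphore) come from the usual
// swapchain present loop; the rectangle marks the only region that changed this frame.
vk::RectLayerKHR dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 64, 64 ), 0 );
vk::PresentRegionKHR region( 1, &dirtyRect );
vk::PresentRegionsKHR regions( 1, &region );
vk::PresentInfoKHR presentInfo( 1, &renderFinished, 1, &swapchain, &imageIndex );
presentInfo.setPNext( &regions );
queue.presentKHR( presentInfo );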
+
+ struct PhysicalDeviceVariablePointerFeatures
+ {
+ PhysicalDeviceVariablePointerFeatures( Bool32 variablePointersStorageBuffer_ = 0, Bool32 variablePointers_ = 0 )
+ : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+ , variablePointers( variablePointers_ )
+ {
+ }
+
+ PhysicalDeviceVariablePointerFeatures( VkPhysicalDeviceVariablePointerFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) );
+ }
+
+ PhysicalDeviceVariablePointerFeatures& operator=( VkPhysicalDeviceVariablePointerFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceVariablePointerFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceVariablePointerFeatures& setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ )
+ {
+ variablePointersStorageBuffer = variablePointersStorageBuffer_;
+ return *this;
+ }
+
+ PhysicalDeviceVariablePointerFeatures& setVariablePointers( Bool32 variablePointers_ )
+ {
+ variablePointers = variablePointers_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceVariablePointerFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceVariablePointerFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+ && ( variablePointers == rhs.variablePointers );
+ }
+
+ bool operator!=( PhysicalDeviceVariablePointerFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceVariablePointerFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 variablePointersStorageBuffer;
+ Bool32 variablePointers;
+ };
+ static_assert( sizeof( PhysicalDeviceVariablePointerFeatures ) == sizeof( VkPhysicalDeviceVariablePointerFeatures ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointerFeatures;
+
+ struct PhysicalDeviceIDProperties
+ {
+ operator const VkPhysicalDeviceIDProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceIDProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( deviceNodeMask == rhs.deviceNodeMask )
+ && ( deviceLUIDValid == rhs.deviceLUIDValid );
+ }
+
+ bool operator!=( PhysicalDeviceIDProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+
+ public:
+ void* pNext = nullptr;
+ uint8_t deviceUUID[VK_UUID_SIZE];
+ uint8_t driverUUID[VK_UUID_SIZE];
+ uint8_t deviceLUID[VK_LUID_SIZE];
+ uint32_t deviceNodeMask;
+ Bool32 deviceLUIDValid;
+ };
+ static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
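// [Editor's note] Query sketch (editorial aside, not part of the patch). Output-only
// structs like this one have no setters; they are filled in through the pNext chain of
// vk::PhysicalDeviceProperties2. Assumes Vulkan 1.1 (or getProperties2KHR with the
// VK_KHR_get_physical_device_properties2 extension) and a valid `physicalDevice`.
vk::PhysicalDeviceIDProperties idProperties;
vk::PhysicalDeviceProperties2 properties2;
properties2.pNext = &idProperties;              // output structs expose pNext as a public member
physicalDevice.getProperties2( &properties2 );
// idProperties.deviceUUID / driverUUID / deviceLUID can now be compared with the
// identifiers reported by another API or process, e.g. for external-memory interop.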
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportMemoryWin32HandleInfoKHR
+ {
+ ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
+ {
+ }
+
+ ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
+ }
+
+ ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ExportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportMemoryWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+ {
+ pAttributes = pAttributes_;
+ return *this;
+ }
+
+ ExportMemoryWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+ {
+ dwAccess = dwAccess_;
+ return *this;
+ }
+
+ ExportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkExportMemoryWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ const SECURITY_ATTRIBUTES* pAttributes;
+ DWORD dwAccess;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct MemoryWin32HandlePropertiesKHR
+ {
+ operator const VkMemoryWin32HandlePropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>(this);
+ }
+
+ bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t memoryTypeBits;
+ };
+ static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct MemoryFdPropertiesKHR
+ {
+ operator const VkMemoryFdPropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>(this);
+ }
+
+ bool operator==( MemoryFdPropertiesKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryFdPropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t memoryTypeBits;
+ };
+ static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct Win32KeyedMutexAcquireReleaseInfoKHR
+ {
+ Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeouts_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
+ : acquireCount( acquireCount_ )
+ , pAcquireSyncs( pAcquireSyncs_ )
+ , pAcquireKeys( pAcquireKeys_ )
+ , pAcquireTimeouts( pAcquireTimeouts_ )
+ , releaseCount( releaseCount_ )
+ , pReleaseSyncs( pReleaseSyncs_ )
+ , pReleaseKeys( pReleaseKeys_ )
+ {
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
+ return *this;
+ }
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setAcquireCount( uint32_t acquireCount_ )
+ {
+ acquireCount = acquireCount_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
+ {
+ pAcquireSyncs = pAcquireSyncs_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
+ {
+ pAcquireKeys = pAcquireKeys_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ )
+ {
+ pAcquireTimeouts = pAcquireTimeouts_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setReleaseCount( uint32_t releaseCount_ )
+ {
+ releaseCount = releaseCount_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
+ {
+ pReleaseSyncs = pReleaseSyncs_;
+ return *this;
+ }
+
+ Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
+ {
+ pReleaseKeys = pReleaseKeys_;
+ return *this;
+ }
+
+ operator const VkWin32KeyedMutexAcquireReleaseInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this);
+ }
+
+ bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( acquireCount == rhs.acquireCount )
+ && ( pAcquireSyncs == rhs.pAcquireSyncs )
+ && ( pAcquireKeys == rhs.pAcquireKeys )
+ && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+ && ( releaseCount == rhs.releaseCount )
+ && ( pReleaseSyncs == rhs.pReleaseSyncs )
+ && ( pReleaseKeys == rhs.pReleaseKeys );
+ }
+
+ bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t acquireCount;
+ const DeviceMemory* pAcquireSyncs;
+ const uint64_t* pAcquireKeys;
+ const uint32_t* pAcquireTimeouts;
+ uint32_t releaseCount;
+ const DeviceMemory* pReleaseSyncs;
+ const uint64_t* pReleaseKeys;
+ };
+ static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
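+
+ // Usage sketch (illustrative only; `mem`, `acquireKeys`, `timeouts` and
+ // `releaseKeys` are assumed to exist in the calling code):
+ //   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo = vk::Win32KeyedMutexAcquireReleaseInfoKHR()
+ //     .setAcquireCount( 1 ).setPAcquireSyncs( &mem ).setPAcquireKeys( acquireKeys ).setPAcquireTimeouts( timeouts )
+ //     .setReleaseCount( 1 ).setPReleaseSyncs( &mem ).setPReleaseKeys( releaseKeys );
+ // The struct is then chained into a SubmitInfo via pNext so the keyed mutex
+ // is acquired before and released after the submitted work.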
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportSemaphoreWin32HandleInfoKHR
+ {
+ ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
+ {
+ }
+
+ ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+ }
+
+ ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ExportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportSemaphoreWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+ {
+ pAttributes = pAttributes_;
+ return *this;
+ }
+
+ ExportSemaphoreWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+ {
+ dwAccess = dwAccess_;
+ return *this;
+ }
+
+ ExportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkExportSemaphoreWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ const SECURITY_ATTRIBUTES* pAttributes;
+ DWORD dwAccess;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct D3D12FenceSubmitInfoKHR
+ {
+ D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, const uint64_t* pWaitSemaphoreValues_ = nullptr, uint32_t signalSemaphoreValuesCount_ = 0, const uint64_t* pSignalSemaphoreValues_ = nullptr )
+ : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+ , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+ , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+ , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+ {
+ }
+
+ D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
+ }
+
+ D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
+ return *this;
+ }
+ D3D12FenceSubmitInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ D3D12FenceSubmitInfoKHR& setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ )
+ {
+ waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+ return *this;
+ }
+
+ D3D12FenceSubmitInfoKHR& setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ )
+ {
+ pWaitSemaphoreValues = pWaitSemaphoreValues_;
+ return *this;
+ }
+
+ D3D12FenceSubmitInfoKHR& setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ )
+ {
+ signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+ return *this;
+ }
+
+ D3D12FenceSubmitInfoKHR& setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ )
+ {
+ pSignalSemaphoreValues = pSignalSemaphoreValues_;
+ return *this;
+ }
+
+ operator const VkD3D12FenceSubmitInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>(this);
+ }
+
+ bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+ && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+ && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+ && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+ }
+
+ bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t waitSemaphoreValuesCount;
+ const uint64_t* pWaitSemaphoreValues;
+ uint32_t signalSemaphoreValuesCount;
+ const uint64_t* pSignalSemaphoreValues;
+ };
+ static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ExportFenceWin32HandleInfoKHR
+ {
+ ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
+ {
+ }
+
+ ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
+ }
+
+ ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ExportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportFenceWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+ {
+ pAttributes = pAttributes_;
+ return *this;
+ }
+
+ ExportFenceWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+ {
+ dwAccess = dwAccess_;
+ return *this;
+ }
+
+ ExportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkExportFenceWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pAttributes == rhs.pAttributes )
+ && ( dwAccess == rhs.dwAccess )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ const SECURITY_ATTRIBUTES* pAttributes;
+ DWORD dwAccess;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct PhysicalDeviceMultiviewFeatures
+ {
+ PhysicalDeviceMultiviewFeatures( Bool32 multiview_ = 0, Bool32 multiviewGeometryShader_ = 0, Bool32 multiviewTessellationShader_ = 0 )
+ : multiview( multiview_ )
+ , multiviewGeometryShader( multiviewGeometryShader_ )
+ , multiviewTessellationShader( multiviewTessellationShader_ )
+ {
+ }
+
+ PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
+ }
+
+ PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceMultiviewFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceMultiviewFeatures& setMultiview( Bool32 multiview_ )
+ {
+ multiview = multiview_;
+ return *this;
+ }
+
+ PhysicalDeviceMultiviewFeatures& setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ )
+ {
+ multiviewGeometryShader = multiviewGeometryShader_;
+ return *this;
+ }
+
+ PhysicalDeviceMultiviewFeatures& setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ )
+ {
+ multiviewTessellationShader = multiviewTessellationShader_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceMultiviewFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( multiview == rhs.multiview )
+ && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+ && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+ }
+
+ bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 multiview;
+ Bool32 multiviewGeometryShader;
+ Bool32 multiviewTessellationShader;
+ };
+ static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
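+
+ // Usage sketch (illustrative only; `deviceCreateInfo` is assumed to be a
+ // vk::DeviceCreateInfo being filled in elsewhere):
+ //   vk::PhysicalDeviceMultiviewFeatures multiviewFeatures = vk::PhysicalDeviceMultiviewFeatures()
+ //     .setMultiview( VK_TRUE );
+ //   deviceCreateInfo.setPNext( &multiviewFeatures );
+ // Chained into DeviceCreateInfo like this, the struct enables the requested
+ // multiview features at device creation; it can also be chained into
+ // PhysicalDeviceFeatures2 to query which of the features are supported.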
+
+ struct PhysicalDeviceMultiviewProperties
+ {
+ operator const VkPhysicalDeviceMultiviewProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+ && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+ }
+
+ bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxMultiviewViewCount;
+ uint32_t maxMultiviewInstanceIndex;
+ };
+ static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
+ struct RenderPassMultiviewCreateInfo
+ {
+ RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0, const uint32_t* pViewMasks_ = nullptr, uint32_t dependencyCount_ = 0, const int32_t* pViewOffsets_ = nullptr, uint32_t correlationMaskCount_ = 0, const uint32_t* pCorrelationMasks_ = nullptr )
+ : subpassCount( subpassCount_ )
+ , pViewMasks( pViewMasks_ )
+ , dependencyCount( dependencyCount_ )
+ , pViewOffsets( pViewOffsets_ )
+ , correlationMaskCount( correlationMaskCount_ )
+ , pCorrelationMasks( pCorrelationMasks_ )
+ {
+ }
+
+ RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
+ }
+
+ RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
+ return *this;
+ }
+ RenderPassMultiviewCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setSubpassCount( uint32_t subpassCount_ )
+ {
+ subpassCount = subpassCount_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setPViewMasks( const uint32_t* pViewMasks_ )
+ {
+ pViewMasks = pViewMasks_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
+ {
+ dependencyCount = dependencyCount_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setPViewOffsets( const int32_t* pViewOffsets_ )
+ {
+ pViewOffsets = pViewOffsets_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setCorrelationMaskCount( uint32_t correlationMaskCount_ )
+ {
+ correlationMaskCount = correlationMaskCount_;
+ return *this;
+ }
+
+ RenderPassMultiviewCreateInfo& setPCorrelationMasks( const uint32_t* pCorrelationMasks_ )
+ {
+ pCorrelationMasks = pCorrelationMasks_;
+ return *this;
+ }
+
+ operator const VkRenderPassMultiviewCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>(this);
+ }
+
+ bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( subpassCount == rhs.subpassCount )
+ && ( pViewMasks == rhs.pViewMasks )
+ && ( dependencyCount == rhs.dependencyCount )
+ && ( pViewOffsets == rhs.pViewOffsets )
+ && ( correlationMaskCount == rhs.correlationMaskCount )
+ && ( pCorrelationMasks == rhs.pCorrelationMasks );
+ }
+
+ bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t subpassCount;
+ const uint32_t* pViewMasks;
+ uint32_t dependencyCount;
+ const int32_t* pViewOffsets;
+ uint32_t correlationMaskCount;
+ const uint32_t* pCorrelationMasks;
+ };
+ static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+
+ using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
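+
+ // Usage sketch (illustrative only; the mask values are placeholders):
+ //   const uint32_t viewMasks[] = { 0x3 };         // subpass 0 renders to views 0 and 1
+ //   const uint32_t correlationMasks[] = { 0x3 };
+ //   vk::RenderPassMultiviewCreateInfo multiviewInfo = vk::RenderPassMultiviewCreateInfo()
+ //     .setSubpassCount( 1 ).setPViewMasks( viewMasks )
+ //     .setCorrelationMaskCount( 1 ).setPCorrelationMasks( correlationMasks );
+ // Chaining this into RenderPassCreateInfo::pNext turns the render pass into a
+ // multiview render pass; each bit of a view mask selects one view.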
+
+ struct BindBufferMemoryInfo
+ {
+ BindBufferMemoryInfo( Buffer buffer_ = Buffer(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0 )
+ : buffer( buffer_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ {
+ }
+
+ BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) );
+ }
+
+ BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) );
+ return *this;
+ }
+ BindBufferMemoryInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindBufferMemoryInfo& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ BindBufferMemoryInfo& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ BindBufferMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ )
+ {
+ memoryOffset = memoryOffset_;
+ return *this;
+ }
+
+ operator const VkBindBufferMemoryInfo&() const
+ {
+ return *reinterpret_cast<const VkBindBufferMemoryInfo*>(this);
+ }
+
+ bool operator==( BindBufferMemoryInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( buffer == rhs.buffer )
+ && ( memory == rhs.memory )
+ && ( memoryOffset == rhs.memoryOffset );
+ }
+
+ bool operator!=( BindBufferMemoryInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindBufferMemoryInfo;
+
+ public:
+ const void* pNext = nullptr;
+ Buffer buffer;
+ DeviceMemory memory;
+ DeviceSize memoryOffset;
+ };
+ static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+
+ using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
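+
+ // Usage sketch (illustrative only; `buffer`, `memory` and `offset` are
+ // assumed to come from the calling code):
+ //   vk::BindBufferMemoryInfo bindInfo = vk::BindBufferMemoryInfo()
+ //     .setBuffer( buffer )
+ //     .setMemory( memory )
+ //     .setMemoryOffset( offset );
+ // An array of these is passed to the device-level bindBufferMemory2 entry
+ // point, which binds several buffers in a single call.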
+
+ struct BindBufferMemoryDeviceGroupInfo
+ {
+ BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr )
+ : deviceIndexCount( deviceIndexCount_ )
+ , pDeviceIndices( pDeviceIndices_ )
+ {
+ }
+
+ BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
+ }
+
+ BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
+ return *this;
+ }
+ BindBufferMemoryDeviceGroupInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindBufferMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ )
+ {
+ deviceIndexCount = deviceIndexCount_;
+ return *this;
+ }
+
+ BindBufferMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
+ {
+ pDeviceIndices = pDeviceIndices_;
+ return *this;
+ }
+
+ operator const VkBindBufferMemoryDeviceGroupInfo&() const
+ {
+ return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>(this);
+ }
+
+ bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceIndexCount == rhs.deviceIndexCount )
+ && ( pDeviceIndices == rhs.pDeviceIndices );
+ }
+
+ bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t deviceIndexCount;
+ const uint32_t* pDeviceIndices;
+ };
+ static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+
+ using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+
+ struct BindImageMemoryInfo
+ {
+ BindImageMemoryInfo( Image image_ = Image(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0 )
+ : image( image_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ {
+ }
+
+ BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
+ }
+
+ BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
+ return *this;
+ }
+ BindImageMemoryInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindImageMemoryInfo& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ BindImageMemoryInfo& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ BindImageMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ )
+ {
+ memoryOffset = memoryOffset_;
+ return *this;
+ }
+
+ operator const VkBindImageMemoryInfo&() const
+ {
+ return *reinterpret_cast<const VkBindImageMemoryInfo*>(this);
+ }
+
+ bool operator==( BindImageMemoryInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image )
+ && ( memory == rhs.memory )
+ && ( memoryOffset == rhs.memoryOffset );
+ }
+
+ bool operator!=( BindImageMemoryInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindImageMemoryInfo;
+
+ public:
+ const void* pNext = nullptr;
+ Image image;
+ DeviceMemory memory;
+ DeviceSize memoryOffset;
+ };
+ static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+
+ using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+
+ struct BindImageMemoryDeviceGroupInfo
+ {
+ BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr, uint32_t splitInstanceBindRegionCount_ = 0, const Rect2D* pSplitInstanceBindRegions_ = nullptr )
+ : deviceIndexCount( deviceIndexCount_ )
+ , pDeviceIndices( pDeviceIndices_ )
+ , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
+ , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+ {
+ }
+
+ BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
+ }
+
+ BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
+ return *this;
+ }
+ BindImageMemoryDeviceGroupInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindImageMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ )
+ {
+ deviceIndexCount = deviceIndexCount_;
+ return *this;
+ }
+
+ BindImageMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
+ {
+ pDeviceIndices = pDeviceIndices_;
+ return *this;
+ }
+
+ BindImageMemoryDeviceGroupInfo& setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ )
+ {
+ splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
+ return *this;
+ }
+
+ BindImageMemoryDeviceGroupInfo& setPSplitInstanceBindRegions( const Rect2D* pSplitInstanceBindRegions_ )
+ {
+ pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
+ return *this;
+ }
+
+ operator const VkBindImageMemoryDeviceGroupInfo&() const
+ {
+ return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>(this);
+ }
+
+ bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceIndexCount == rhs.deviceIndexCount )
+ && ( pDeviceIndices == rhs.pDeviceIndices )
+ && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
+ && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
+ }
+
+ bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t deviceIndexCount;
+ const uint32_t* pDeviceIndices;
+ uint32_t splitInstanceBindRegionCount;
+ const Rect2D* pSplitInstanceBindRegions;
+ };
+ static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+
+ using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+
+ struct DeviceGroupRenderPassBeginInfo
+ {
+ DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0, uint32_t deviceRenderAreaCount_ = 0, const Rect2D* pDeviceRenderAreas_ = nullptr )
+ : deviceMask( deviceMask_ )
+ , deviceRenderAreaCount( deviceRenderAreaCount_ )
+ , pDeviceRenderAreas( pDeviceRenderAreas_ )
+ {
+ }
+
+ DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
+ }
+
+ DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
+ return *this;
+ }
+ DeviceGroupRenderPassBeginInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupRenderPassBeginInfo& setDeviceMask( uint32_t deviceMask_ )
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+ DeviceGroupRenderPassBeginInfo& setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ )
+ {
+ deviceRenderAreaCount = deviceRenderAreaCount_;
+ return *this;
+ }
+
+ DeviceGroupRenderPassBeginInfo& setPDeviceRenderAreas( const Rect2D* pDeviceRenderAreas_ )
+ {
+ pDeviceRenderAreas = pDeviceRenderAreas_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupRenderPassBeginInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>(this);
+ }
+
+ bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceMask == rhs.deviceMask )
+ && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+ && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+ }
+
+ bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t deviceMask;
+ uint32_t deviceRenderAreaCount;
+ const Rect2D* pDeviceRenderAreas;
+ };
+ static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
+
+ using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+
+ struct DeviceGroupCommandBufferBeginInfo
+ {
+ DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 )
+ : deviceMask( deviceMask_ )
+ {
+ }
+
+ DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
+ }
+
+ DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
+ return *this;
+ }
+ DeviceGroupCommandBufferBeginInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupCommandBufferBeginInfo& setDeviceMask( uint32_t deviceMask_ )
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupCommandBufferBeginInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>(this);
+ }
+
+ bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t deviceMask;
+ };
+ static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+
+ using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+
+ struct DeviceGroupSubmitInfo
+ {
+ DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0, const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr, uint32_t commandBufferCount_ = 0, const uint32_t* pCommandBufferDeviceMasks_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr )
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+ , commandBufferCount( commandBufferCount_ )
+ , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+ {
+ }
+
+ DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) );
+ }
+
+ DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) );
+ return *this;
+ }
+ DeviceGroupSubmitInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ {
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ )
+ {
+ pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+ {
+ commandBufferCount = commandBufferCount_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ )
+ {
+ pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+ {
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
+ }
+
+ DeviceGroupSubmitInfo& setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ )
+ {
+ pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupSubmitInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>(this);
+ }
+
+ bool operator==( DeviceGroupSubmitInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+ && ( commandBufferCount == rhs.commandBufferCount )
+ && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+ }
+
+ bool operator!=( DeviceGroupSubmitInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t waitSemaphoreCount;
+ const uint32_t* pWaitSemaphoreDeviceIndices;
+ uint32_t commandBufferCount;
+ const uint32_t* pCommandBufferDeviceMasks;
+ uint32_t signalSemaphoreCount;
+ const uint32_t* pSignalSemaphoreDeviceIndices;
+ };
+ static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+
+ using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
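+
+ // Usage sketch (illustrative only; the mask array mirrors the command buffer
+ // array of the SubmitInfo this structure extends, and `submitInfo` is assumed
+ // to be a vk::SubmitInfo):
+ //   const uint32_t cmdBufDeviceMasks[] = { 0x1 };  // execute on physical device 0
+ //   vk::DeviceGroupSubmitInfo groupSubmitInfo = vk::DeviceGroupSubmitInfo()
+ //     .setCommandBufferCount( 1 )
+ //     .setPCommandBufferDeviceMasks( cmdBufDeviceMasks );
+ //   submitInfo.setPNext( &groupSubmitInfo );
+ // The counts here are expected to mirror the corresponding counts in the
+ // SubmitInfo being extended.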
+
+ struct DeviceGroupBindSparseInfo
+ {
+ DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0, uint32_t memoryDeviceIndex_ = 0 )
+ : resourceDeviceIndex( resourceDeviceIndex_ )
+ , memoryDeviceIndex( memoryDeviceIndex_ )
+ {
+ }
+
+ DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) );
+ }
+
+ DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) );
+ return *this;
+ }
+ DeviceGroupBindSparseInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupBindSparseInfo& setResourceDeviceIndex( uint32_t resourceDeviceIndex_ )
+ {
+ resourceDeviceIndex = resourceDeviceIndex_;
+ return *this;
+ }
+
+ DeviceGroupBindSparseInfo& setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ )
+ {
+ memoryDeviceIndex = memoryDeviceIndex_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupBindSparseInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>(this);
+ }
+
+ bool operator==( DeviceGroupBindSparseInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+ && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+ }
+
+ bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t resourceDeviceIndex;
+ uint32_t memoryDeviceIndex;
+ };
+ static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+
+ using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+
+ struct ImageSwapchainCreateInfoKHR
+ {
+ ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR() )
+ : swapchain( swapchain_ )
+ {
+ }
+
+ ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
+ }
+
+ ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
+ return *this;
+ }
+ ImageSwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageSwapchainCreateInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
+ {
+ swapchain = swapchain_;
+ return *this;
+ }
+
+ operator const VkImageSwapchainCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>(this);
+ }
+
+ bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchain == rhs.swapchain );
+ }
+
+ bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ SwapchainKHR swapchain;
+ };
+ static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+
+ struct BindImageMemorySwapchainInfoKHR
+ {
+ BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), uint32_t imageIndex_ = 0 )
+ : swapchain( swapchain_ )
+ , imageIndex( imageIndex_ )
+ {
+ }
+
+ BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
+ }
+
+ BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
+ return *this;
+ }
+ BindImageMemorySwapchainInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindImageMemorySwapchainInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
+ {
+ swapchain = swapchain_;
+ return *this;
+ }
+
+ BindImageMemorySwapchainInfoKHR& setImageIndex( uint32_t imageIndex_ )
+ {
+ imageIndex = imageIndex_;
+ return *this;
+ }
+
+ operator const VkBindImageMemorySwapchainInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(this);
+ }
+
+ bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchain == rhs.swapchain )
+ && ( imageIndex == rhs.imageIndex );
+ }
+
+ bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ SwapchainKHR swapchain;
+ uint32_t imageIndex;
+ };
+ static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
+
+ struct AcquireNextImageInfoKHR
+ {
+ AcquireNextImageInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), uint64_t timeout_ = 0, Semaphore semaphore_ = Semaphore(), Fence fence_ = Fence(), uint32_t deviceMask_ = 0 )
+ : swapchain( swapchain_ )
+ , timeout( timeout_ )
+ , semaphore( semaphore_ )
+ , fence( fence_ )
+ , deviceMask( deviceMask_ )
+ {
+ }
+
+ AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) );
+ }
+
+ AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) );
+ return *this;
+ }
+ AcquireNextImageInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ AcquireNextImageInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
+ {
+ swapchain = swapchain_;
+ return *this;
+ }
+
+ AcquireNextImageInfoKHR& setTimeout( uint64_t timeout_ )
+ {
+ timeout = timeout_;
+ return *this;
+ }
+
+ AcquireNextImageInfoKHR& setSemaphore( Semaphore semaphore_ )
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ AcquireNextImageInfoKHR& setFence( Fence fence_ )
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ AcquireNextImageInfoKHR& setDeviceMask( uint32_t deviceMask_ )
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+ operator const VkAcquireNextImageInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>(this);
+ }
+
+ bool operator==( AcquireNextImageInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchain == rhs.swapchain )
+ && ( timeout == rhs.timeout )
+ && ( semaphore == rhs.semaphore )
+ && ( fence == rhs.fence )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( AcquireNextImageInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ SwapchainKHR swapchain;
+ uint64_t timeout;
+ Semaphore semaphore;
+ Fence fence;
+ uint32_t deviceMask;
+ };
+ static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
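+
+ // Usage sketch (illustrative only; `swapchain` and `imageAvailableSemaphore`
+ // are assumed to be valid handles owned by the calling code):
+ //   vk::AcquireNextImageInfoKHR acquireInfo = vk::AcquireNextImageInfoKHR()
+ //     .setSwapchain( swapchain )
+ //     .setTimeout( UINT64_MAX )
+ //     .setSemaphore( imageAvailableSemaphore )
+ //     .setDeviceMask( 1 );
+ // This structure is consumed by the device-group aware acquire call
+ // (vkAcquireNextImage2KHR) in place of the separate swapchain/timeout/
+ // semaphore/fence parameters of vkAcquireNextImageKHR.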
+
+ struct HdrMetadataEXT
+ {
+ HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = XYColorEXT(), XYColorEXT displayPrimaryGreen_ = XYColorEXT(), XYColorEXT displayPrimaryBlue_ = XYColorEXT(), XYColorEXT whitePoint_ = XYColorEXT(), float maxLuminance_ = 0, float minLuminance_ = 0, float maxContentLightLevel_ = 0, float maxFrameAverageLightLevel_ = 0 )
+ : displayPrimaryRed( displayPrimaryRed_ )
+ , displayPrimaryGreen( displayPrimaryGreen_ )
+ , displayPrimaryBlue( displayPrimaryBlue_ )
+ , whitePoint( whitePoint_ )
+ , maxLuminance( maxLuminance_ )
+ , minLuminance( minLuminance_ )
+ , maxContentLightLevel( maxContentLightLevel_ )
+ , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+ {
+ }
+
+ HdrMetadataEXT( VkHdrMetadataEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
+ }
+
+ HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
+ return *this;
+ }
+ HdrMetadataEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setDisplayPrimaryRed( XYColorEXT displayPrimaryRed_ )
+ {
+ displayPrimaryRed = displayPrimaryRed_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setDisplayPrimaryGreen( XYColorEXT displayPrimaryGreen_ )
+ {
+ displayPrimaryGreen = displayPrimaryGreen_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setDisplayPrimaryBlue( XYColorEXT displayPrimaryBlue_ )
+ {
+ displayPrimaryBlue = displayPrimaryBlue_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setWhitePoint( XYColorEXT whitePoint_ )
+ {
+ whitePoint = whitePoint_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setMaxLuminance( float maxLuminance_ )
+ {
+ maxLuminance = maxLuminance_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setMinLuminance( float minLuminance_ )
+ {
+ minLuminance = minLuminance_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setMaxContentLightLevel( float maxContentLightLevel_ )
+ {
+ maxContentLightLevel = maxContentLightLevel_;
+ return *this;
+ }
+
+ HdrMetadataEXT& setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ )
+ {
+ maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+ return *this;
+ }
+
+ operator const VkHdrMetadataEXT&() const
+ {
+ return *reinterpret_cast<const VkHdrMetadataEXT*>(this);
+ }
+
+ bool operator==( HdrMetadataEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( displayPrimaryRed == rhs.displayPrimaryRed )
+ && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+ && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+ && ( whitePoint == rhs.whitePoint )
+ && ( maxLuminance == rhs.maxLuminance )
+ && ( minLuminance == rhs.minLuminance )
+ && ( maxContentLightLevel == rhs.maxContentLightLevel )
+ && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+ }
+
+ bool operator!=( HdrMetadataEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eHdrMetadataEXT;
+
+ public:
+ const void* pNext = nullptr;
+ XYColorEXT displayPrimaryRed;
+ XYColorEXT displayPrimaryGreen;
+ XYColorEXT displayPrimaryBlue;
+ XYColorEXT whitePoint;
+ float maxLuminance;
+ float minLuminance;
+ float maxContentLightLevel;
+ float maxFrameAverageLightLevel;
+ };
+ static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
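+
+ // Usage sketch (illustrative only; the chromaticity and luminance values are
+ // common BT.2020 / SMPTE ST 2086 example numbers, not required values):
+ //   vk::HdrMetadataEXT hdrMetadata = vk::HdrMetadataEXT()
+ //     .setDisplayPrimaryRed( vk::XYColorEXT( 0.708f, 0.292f ) )
+ //     .setDisplayPrimaryGreen( vk::XYColorEXT( 0.170f, 0.797f ) )
+ //     .setDisplayPrimaryBlue( vk::XYColorEXT( 0.131f, 0.046f ) )
+ //     .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )
+ //     .setMaxLuminance( 1000.0f )
+ //     .setMinLuminance( 0.001f );
+ // One HdrMetadataEXT per swapchain is then handed to the VK_EXT_hdr_metadata
+ // entry point (vkSetHdrMetadataEXT) to describe the mastering display.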
+
+ struct PresentTimesInfoGOOGLE
+ {
+ PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, const PresentTimeGOOGLE* pTimes_ = nullptr )
+ : swapchainCount( swapchainCount_ )
+ , pTimes( pTimes_ )
+ {
+ }
+
+ PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
+ }
+
+ PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
+ return *this;
+ }
+ PresentTimesInfoGOOGLE& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PresentTimesInfoGOOGLE& setSwapchainCount( uint32_t swapchainCount_ )
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ PresentTimesInfoGOOGLE& setPTimes( const PresentTimeGOOGLE* pTimes_ )
+ {
+ pTimes = pTimes_;
+ return *this;
+ }
+
+ operator const VkPresentTimesInfoGOOGLE&() const
+ {
+ return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(this);
+ }
+
+ bool operator==( PresentTimesInfoGOOGLE const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pTimes == rhs.pTimes );
+ }
+
+ bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t swapchainCount;
+ const PresentTimeGOOGLE* pTimes;
+ };
+ static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ struct IOSSurfaceCreateInfoMVK
+ {
+ IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = IOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr )
+ : flags( flags_ )
+ , pView( pView_ )
+ {
+ }
+
+ IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
+ }
+
+ IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
+ return *this;
+ }
+ IOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ IOSSurfaceCreateInfoMVK& setFlags( IOSSurfaceCreateFlagsMVK flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ IOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
+ {
+ pView = pView_;
+ return *this;
+ }
+
+ operator const VkIOSSurfaceCreateInfoMVK&() const
+ {
+ return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>(this);
+ }
+
+ bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pView == rhs.pView );
+ }
+
+ bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+
+ public:
+ const void* pNext = nullptr;
+ IOSSurfaceCreateFlagsMVK flags;
+ const void* pView;
+ };
+ static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ struct MacOSSurfaceCreateInfoMVK
+ {
+ MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = MacOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr )
+ : flags( flags_ )
+ , pView( pView_ )
+ {
+ }
+
+ MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
+ }
+
+ MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
+ return *this;
+ }
+ MacOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MacOSSurfaceCreateInfoMVK& setFlags( MacOSSurfaceCreateFlagsMVK flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ MacOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
+ {
+ pView = pView_;
+ return *this;
+ }
+
+ operator const VkMacOSSurfaceCreateInfoMVK&() const
+ {
+ return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>(this);
+ }
+
+ bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pView == rhs.pView );
+ }
+
+ bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+
+ public:
+ const void* pNext = nullptr;
+ MacOSSurfaceCreateFlagsMVK flags;
+ const void* pView;
+ };
+ static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ struct PipelineViewportWScalingStateCreateInfoNV
+ {
+ PipelineViewportWScalingStateCreateInfoNV( Bool32 viewportWScalingEnable_ = 0, uint32_t viewportCount_ = 0, const ViewportWScalingNV* pViewportWScalings_ = nullptr )
+ : viewportWScalingEnable( viewportWScalingEnable_ )
+ , viewportCount( viewportCount_ )
+ , pViewportWScalings( pViewportWScalings_ )
+ {
+ }
+
+ PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+ }
+
+ PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+ return *this;
+ }
+ PipelineViewportWScalingStateCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineViewportWScalingStateCreateInfoNV& setViewportWScalingEnable( Bool32 viewportWScalingEnable_ )
+ {
+ viewportWScalingEnable = viewportWScalingEnable_;
+ return *this;
+ }
+
+ PipelineViewportWScalingStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+ {
+ viewportCount = viewportCount_;
+ return *this;
+ }
+
+ PipelineViewportWScalingStateCreateInfoNV& setPViewportWScalings( const ViewportWScalingNV* pViewportWScalings_ )
+ {
+ pViewportWScalings = pViewportWScalings_;
+ return *this;
+ }
+
+ operator const VkPipelineViewportWScalingStateCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>(this);
+ }
+
+ bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+ && ( viewportCount == rhs.viewportCount )
+ && ( pViewportWScalings == rhs.pViewportWScalings );
+ }
+
+ bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 viewportWScalingEnable;
+ uint32_t viewportCount;
+ const ViewportWScalingNV* pViewportWScalings;
+ };
+ static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceDiscardRectanglePropertiesEXT
+ {
+ PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 )
+ : maxDiscardRectangles( maxDiscardRectangles_ )
+ {
+ }
+
+ PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
+ }
+
+ PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceDiscardRectanglePropertiesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceDiscardRectanglePropertiesEXT& setMaxDiscardRectangles( uint32_t maxDiscardRectangles_ )
+ {
+ maxDiscardRectangles = maxDiscardRectangles_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceDiscardRectanglePropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+ }
+
+ bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxDiscardRectangles;
+ };
+ static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+ {
+ operator const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+ }
+
+ bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 perViewPositionAllComponents;
+ };
+ static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceSurfaceInfo2KHR
+ {
+ PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = SurfaceKHR() )
+ : surface( surface_ )
+ {
+ }
+
+ PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
+ }
+
+ PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
+ return *this;
+ }
+ PhysicalDeviceSurfaceInfo2KHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceSurfaceInfo2KHR& setSurface( SurfaceKHR surface_ )
+ {
+ surface = surface_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceSurfaceInfo2KHR&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surface == rhs.surface );
+ }
+
+ bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+
+ public:
+ const void* pNext = nullptr;
+ SurfaceKHR surface;
+ };
+ static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
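+
+ // Usage sketch (illustrative only; `surface` is assumed to be a valid
+ // vk::SurfaceKHR):
+ //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo = vk::PhysicalDeviceSurfaceInfo2KHR()
+ //     .setSurface( surface );
+ // Bundling the surface query parameters in a struct lets the "2" variants of
+ // the surface queries (VK_KHR_get_surface_capabilities2) accept additional
+ // pNext-chained inputs without changing their signatures.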
+
+ struct PhysicalDevice16BitStorageFeatures
+ {
+ PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_ = 0, Bool32 uniformAndStorageBuffer16BitAccess_ = 0, Bool32 storagePushConstant16_ = 0, Bool32 storageInputOutput16_ = 0 )
+ : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+ , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+ , storagePushConstant16( storagePushConstant16_ )
+ , storageInputOutput16( storageInputOutput16_ )
+ {
+ }
+
+ PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
+ }
+
+ PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
+ return *this;
+ }
+ PhysicalDevice16BitStorageFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDevice16BitStorageFeatures& setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ )
+ {
+ storageBuffer16BitAccess = storageBuffer16BitAccess_;
+ return *this;
+ }
+
+ PhysicalDevice16BitStorageFeatures& setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ )
+ {
+ uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+ return *this;
+ }
+
+ PhysicalDevice16BitStorageFeatures& setStoragePushConstant16( Bool32 storagePushConstant16_ )
+ {
+ storagePushConstant16 = storagePushConstant16_;
+ return *this;
+ }
+
+ PhysicalDevice16BitStorageFeatures& setStorageInputOutput16( Bool32 storageInputOutput16_ )
+ {
+ storageInputOutput16 = storageInputOutput16_;
+ return *this;
+ }
+
+ operator const VkPhysicalDevice16BitStorageFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+ && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+ && ( storagePushConstant16 == rhs.storagePushConstant16 )
+ && ( storageInputOutput16 == rhs.storageInputOutput16 );
+ }
+
+ bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 storageBuffer16BitAccess;
+ Bool32 uniformAndStorageBuffer16BitAccess;
+ Bool32 storagePushConstant16;
+ Bool32 storageInputOutput16;
+ };
+ static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
+
+ using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
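+
+ // Editorial sketch (not part of the generated header): feature structs such as
+ // PhysicalDevice16BitStorageFeatures are normally chained into PhysicalDeviceFeatures2
+ // via pNext to query support, and the same chain can be handed to DeviceCreateInfo::pNext
+ // to enable the features. `physicalDevice` is an assumed, valid vk::PhysicalDevice.
+ //
+ //   vk::PhysicalDevice16BitStorageFeatures storage16;
+ //   vk::PhysicalDeviceFeatures2 features2;
+ //   features2.setPNext( &storage16 );
+ //   physicalDevice.getFeatures2( &features2 );  // fills every struct in the chain
+ //   bool supported = ( storage16.storageBuffer16BitAccess == VK_TRUE );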
+
+ struct BufferMemoryRequirementsInfo2
+ {
+ BufferMemoryRequirementsInfo2( Buffer buffer_ = Buffer() )
+ : buffer( buffer_ )
+ {
+ }
+
+ BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
+ }
+
+ BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
+ return *this;
+ }
+ BufferMemoryRequirementsInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferMemoryRequirementsInfo2& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ operator const VkBufferMemoryRequirementsInfo2&() const
+ {
+ return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>(this);
+ }
+
+ bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( buffer == rhs.buffer );
+ }
+
+ bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ Buffer buffer;
+ };
+ static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+
+ using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+
+ struct ImageMemoryRequirementsInfo2
+ {
+ ImageMemoryRequirementsInfo2( Image image_ = Image() )
+ : image( image_ )
+ {
+ }
+
+ ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
+ }
+
+ ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
+ return *this;
+ }
+ ImageMemoryRequirementsInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageMemoryRequirementsInfo2& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ operator const VkImageMemoryRequirementsInfo2&() const
+ {
+ return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>(this);
+ }
+
+ bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image );
+ }
+
+ bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ Image image;
+ };
+ static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+
+ using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+
+ struct ImageSparseMemoryRequirementsInfo2
+ {
+ ImageSparseMemoryRequirementsInfo2( Image image_ = Image() )
+ : image( image_ )
+ {
+ }
+
+ ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
+ }
+
+ ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
+ return *this;
+ }
+ ImageSparseMemoryRequirementsInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageSparseMemoryRequirementsInfo2& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ operator const VkImageSparseMemoryRequirementsInfo2&() const
+ {
+ return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>(this);
+ }
+
+ bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image );
+ }
+
+ bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ Image image;
+ };
+ static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+
+ using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+
+ struct MemoryRequirements2
+ {
+ operator const VkMemoryRequirements2&() const
+ {
+ return *reinterpret_cast<const VkMemoryRequirements2*>(this);
+ }
+
+ bool operator==( MemoryRequirements2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryRequirements == rhs.memoryRequirements );
+ }
+
+ bool operator!=( MemoryRequirements2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryRequirements2;
+
+ public:
+ void* pNext = nullptr;
+ MemoryRequirements memoryRequirements;
+ };
+ static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+
+ using MemoryRequirements2KHR = MemoryRequirements2;
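+
+ // Editorial sketch (not part of the generated header): the *Info2 / *Requirements2 pairs
+ // above feed the extensible get-memory-requirements entry points. Assuming `device` is a
+ // valid vk::Device and `buffer` a valid vk::Buffer:
+ //
+ //   vk::MemoryRequirements2 reqs =
+ //       device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2( buffer ) );
+ //   vk::DeviceSize size      = reqs.memoryRequirements.size;
+ //   vk::DeviceSize alignment = reqs.memoryRequirements.alignment;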
+
+ struct MemoryDedicatedRequirements
+ {
+ operator const VkMemoryDedicatedRequirements&() const
+ {
+ return *reinterpret_cast<const VkMemoryDedicatedRequirements*>(this);
+ }
+
+ bool operator==( MemoryDedicatedRequirements const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+ && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+ }
+
+ bool operator!=( MemoryDedicatedRequirements const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryDedicatedRequirements;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 prefersDedicatedAllocation;
+ Bool32 requiresDedicatedAllocation;
+ };
+ static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
+
+ using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+
+ struct MemoryDedicatedAllocateInfo
+ {
+ MemoryDedicatedAllocateInfo( Image image_ = Image(), Buffer buffer_ = Buffer() )
+ : image( image_ )
+ , buffer( buffer_ )
+ {
+ }
+
+ MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
+ }
+
+ MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
+ return *this;
+ }
+ MemoryDedicatedAllocateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryDedicatedAllocateInfo& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ MemoryDedicatedAllocateInfo& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ operator const VkMemoryDedicatedAllocateInfo&() const
+ {
+ return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>(this);
+ }
+
+ bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( image == rhs.image )
+ && ( buffer == rhs.buffer );
+ }
+
+ bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ Image image;
+ Buffer buffer;
+ };
+ static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
+
+ using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
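+
+ // Editorial sketch (not part of the generated header): MemoryDedicatedRequirements is
+ // chained onto MemoryRequirements2 when querying, and MemoryDedicatedAllocateInfo onto
+ // MemoryAllocateInfo when allocating. `device`, `image`, and `allocateInfo` (a
+ // vk::MemoryAllocateInfo) are assumed to exist already.
+ //
+ //   vk::ImageMemoryRequirementsInfo2 info( image );
+ //   vk::MemoryDedicatedRequirements  dedicatedReqs;
+ //   vk::MemoryRequirements2          reqs;
+ //   reqs.pNext = &dedicatedReqs;                        // pNext is a public member here
+ //   device.getImageMemoryRequirements2( &info, &reqs );
+ //
+ //   vk::MemoryDedicatedAllocateInfo dedicatedInfo( image );
+ //   if ( dedicatedReqs.prefersDedicatedAllocation || dedicatedReqs.requiresDedicatedAllocation )
+ //   {
+ //     allocateInfo.setPNext( &dedicatedInfo );          // keep dedicatedInfo alive until allocation
+ //   }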
+
+ struct SamplerYcbcrConversionInfo
+ {
+ SamplerYcbcrConversionInfo( SamplerYcbcrConversion conversion_ = SamplerYcbcrConversion() )
+ : conversion( conversion_ )
+ {
+ }
+
+ SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) );
+ }
+
+ SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) );
+ return *this;
+ }
+ SamplerYcbcrConversionInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionInfo& setConversion( SamplerYcbcrConversion conversion_ )
+ {
+ conversion = conversion_;
+ return *this;
+ }
+
+ operator const VkSamplerYcbcrConversionInfo&() const
+ {
+ return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>(this);
+ }
+
+ bool operator==( SamplerYcbcrConversionInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( conversion == rhs.conversion );
+ }
+
+ bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+
+ public:
+ const void* pNext = nullptr;
+ SamplerYcbcrConversion conversion;
+ };
+ static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+
+ using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+
+ struct PhysicalDeviceSamplerYcbcrConversionFeatures
+ {
+ PhysicalDeviceSamplerYcbcrConversionFeatures( Bool32 samplerYcbcrConversion_ = 0 )
+ : samplerYcbcrConversion( samplerYcbcrConversion_ )
+ {
+ }
+
+ PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
+ }
+
+ PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceSamplerYcbcrConversionFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceSamplerYcbcrConversionFeatures& setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ )
+ {
+ samplerYcbcrConversion = samplerYcbcrConversion_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceSamplerYcbcrConversionFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+ }
+
+ bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 samplerYcbcrConversion;
+ };
+ static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+
+ struct SamplerYcbcrConversionImageFormatProperties
+ {
+ operator const VkSamplerYcbcrConversionImageFormatProperties&() const
+ {
+ return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>(this);
+ }
+
+ bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+ }
+
+ bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t combinedImageSamplerDescriptorCount;
+ };
+ static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+
+ using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+ struct TextureLODGatherFormatPropertiesAMD
+ {
+ operator const VkTextureLODGatherFormatPropertiesAMD&() const
+ {
+ return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>(this);
+ }
+
+ bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+ }
+
+ bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 supportsTextureGatherLODBiasAMD;
+ };
+ static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+
+ struct ProtectedSubmitInfo
+ {
+ ProtectedSubmitInfo( Bool32 protectedSubmit_ = 0 )
+ : protectedSubmit( protectedSubmit_ )
+ {
+ }
+
+ ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
+ }
+
+ ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
+ return *this;
+ }
+ ProtectedSubmitInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ProtectedSubmitInfo& setProtectedSubmit( Bool32 protectedSubmit_ )
+ {
+ protectedSubmit = protectedSubmit_;
+ return *this;
+ }
+
+ operator const VkProtectedSubmitInfo&() const
+ {
+ return *reinterpret_cast<const VkProtectedSubmitInfo*>(this);
+ }
+
+ bool operator==( ProtectedSubmitInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( protectedSubmit == rhs.protectedSubmit );
+ }
+
+ bool operator!=( ProtectedSubmitInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eProtectedSubmitInfo;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 protectedSubmit;
+ };
+ static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
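+
+ // Editorial sketch (not part of the generated header): ProtectedSubmitInfo extends
+ // SubmitInfo through pNext to mark a submission as protected. `submitInfo`, `queue`,
+ // and `fence` are assumed to be set up elsewhere.
+ //
+ //   vk::ProtectedSubmitInfo protectedSubmit( VK_TRUE );
+ //   submitInfo.setPNext( &protectedSubmit );
+ //   queue.submit( submitInfo, fence );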
+
+ struct PhysicalDeviceProtectedMemoryFeatures
+ {
+ PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = 0 )
+ : protectedMemory( protectedMemory_ )
+ {
+ }
+
+ PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
+ }
+
+ PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceProtectedMemoryFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceProtectedMemoryFeatures& setProtectedMemory( Bool32 protectedMemory_ )
+ {
+ protectedMemory = protectedMemory_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceProtectedMemoryFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( protectedMemory == rhs.protectedMemory );
+ }
+
+ bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 protectedMemory;
+ };
+ static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceProtectedMemoryProperties
+ {
+ PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = 0 )
+ : protectedNoFault( protectedNoFault_ )
+ {
+ }
+
+ PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
+ }
+
+ PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
+ return *this;
+ }
+ PhysicalDeviceProtectedMemoryProperties& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceProtectedMemoryProperties& setProtectedNoFault( Bool32 protectedNoFault_ )
+ {
+ protectedNoFault = protectedNoFault_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceProtectedMemoryProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( protectedNoFault == rhs.protectedNoFault );
+ }
+
+ bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 protectedNoFault;
+ };
+ static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+
+ struct PipelineCoverageToColorStateCreateInfoNV
+ {
+ PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_ = PipelineCoverageToColorStateCreateFlagsNV(), Bool32 coverageToColorEnable_ = 0, uint32_t coverageToColorLocation_ = 0 )
+ : flags( flags_ )
+ , coverageToColorEnable( coverageToColorEnable_ )
+ , coverageToColorLocation( coverageToColorLocation_ )
+ {
+ }
+
+ PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
+ }
+
+ PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
+ return *this;
+ }
+ PipelineCoverageToColorStateCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineCoverageToColorStateCreateInfoNV& setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorEnable( Bool32 coverageToColorEnable_ )
+ {
+ coverageToColorEnable = coverageToColorEnable_;
+ return *this;
+ }
+
+ PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorLocation( uint32_t coverageToColorLocation_ )
+ {
+ coverageToColorLocation = coverageToColorLocation_;
+ return *this;
+ }
+
+ operator const VkPipelineCoverageToColorStateCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>(this);
+ }
+
+ bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( coverageToColorEnable == rhs.coverageToColorEnable )
+ && ( coverageToColorLocation == rhs.coverageToColorLocation );
+ }
+
+ bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineCoverageToColorStateCreateFlagsNV flags;
+ Bool32 coverageToColorEnable;
+ uint32_t coverageToColorLocation;
+ };
+ static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
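+
+ // Editorial sketch (not part of the generated header): like the other wrapper structs,
+ // this one offers chainable setters, so it can be filled fluently and then hooked into
+ // PipelineMultisampleStateCreateInfo::pNext when building a graphics pipeline.
+ //
+ //   vk::PipelineCoverageToColorStateCreateInfoNV coverageToColor;
+ //   coverageToColor.setCoverageToColorEnable( VK_TRUE )
+ //                  .setCoverageToColorLocation( 0 );
+ //   multisampleState.setPNext( &coverageToColor );  // multisampleState: vk::PipelineMultisampleStateCreateInfo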
+
+ struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
+ {
+ operator const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+ && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+ }
+
+ bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 filterMinmaxSingleComponentFormats;
+ Bool32 filterMinmaxImageComponentMapping;
+ };
+ static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct MultisamplePropertiesEXT
+ {
+ operator const VkMultisamplePropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkMultisamplePropertiesEXT*>(this);
+ }
+
+ bool operator==( MultisamplePropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+ }
+
+ bool operator!=( MultisamplePropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMultisamplePropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ Extent2D maxSampleLocationGridSize;
+ };
+ static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+ {
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = 0 )
+ : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+ {
+ }
+
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
+ }
+
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ )
+ {
+ advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+ }
+
+ bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 advancedBlendCoherentOperations;
+ };
+ static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+ {
+ operator const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+ && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+ && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+ && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+ && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+ && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+ }
+
+ bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t advancedBlendMaxColorAttachments;
+ Bool32 advancedBlendIndependentBlend;
+ Bool32 advancedBlendNonPremultipliedSrcColor;
+ Bool32 advancedBlendNonPremultipliedDstColor;
+ Bool32 advancedBlendCorrelatedOverlap;
+ Bool32 advancedBlendAllOperations;
+ };
+ static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct ImageFormatListCreateInfoKHR
+ {
+ ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0, const Format* pViewFormats_ = nullptr )
+ : viewFormatCount( viewFormatCount_ )
+ , pViewFormats( pViewFormats_ )
+ {
+ }
+
+ ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) );
+ }
+
+ ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) );
+ return *this;
+ }
+ ImageFormatListCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageFormatListCreateInfoKHR& setViewFormatCount( uint32_t viewFormatCount_ )
+ {
+ viewFormatCount = viewFormatCount_;
+ return *this;
+ }
+
+ ImageFormatListCreateInfoKHR& setPViewFormats( const Format* pViewFormats_ )
+ {
+ pViewFormats = pViewFormats_;
+ return *this;
+ }
+
+ operator const VkImageFormatListCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImageFormatListCreateInfoKHR*>(this);
+ }
+
+ bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( viewFormatCount == rhs.viewFormatCount )
+ && ( pViewFormats == rhs.pViewFormats );
+ }
+
+ bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageFormatListCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t viewFormatCount;
+ const Format* pViewFormats;
+ };
+ static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" );
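+
+ // Editorial sketch (not part of the generated header): ImageFormatListCreateInfoKHR
+ // declares up front which view formats a mutable-format image will use. Assuming
+ // `imageCreateInfo` is a vk::ImageCreateInfo with ImageCreateFlagBits::eMutableFormat set:
+ //
+ //   std::array<vk::Format, 2> viewFormats = { { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb } };
+ //   vk::ImageFormatListCreateInfoKHR formatList( static_cast<uint32_t>( viewFormats.size() ), viewFormats.data() );
+ //   imageCreateInfo.setPNext( &formatList );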
+
+ struct ValidationCacheCreateInfoEXT
+ {
+ ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_ = ValidationCacheCreateFlagsEXT(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
+ : flags( flags_ )
+ , initialDataSize( initialDataSize_ )
+ , pInitialData( pInitialData_ )
+ {
+ }
+
+ ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
+ }
+
+ ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
+ return *this;
+ }
+ ValidationCacheCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ValidationCacheCreateInfoEXT& setFlags( ValidationCacheCreateFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ValidationCacheCreateInfoEXT& setInitialDataSize( size_t initialDataSize_ )
+ {
+ initialDataSize = initialDataSize_;
+ return *this;
+ }
+
+ ValidationCacheCreateInfoEXT& setPInitialData( const void* pInitialData_ )
+ {
+ pInitialData = pInitialData_;
+ return *this;
+ }
+
+ operator const VkValidationCacheCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>(this);
+ }
+
+ bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( initialDataSize == rhs.initialDataSize )
+ && ( pInitialData == rhs.pInitialData );
+ }
+
+ bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ ValidationCacheCreateFlagsEXT flags;
+ size_t initialDataSize;
+ const void* pInitialData;
+ };
+ static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
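+
+ // Editorial sketch (not part of the generated header): seeding a validation cache
+ // (VK_EXT_validation_cache) from a previously saved blob. `blob` is an assumed
+ // std::vector<uint8_t> loaded from disk; the create info is then passed to
+ // vkCreateValidationCacheEXT (or the corresponding vk::Device member function).
+ //
+ //   vk::ValidationCacheCreateInfoEXT cacheInfo( vk::ValidationCacheCreateFlagsEXT(),
+ //                                               blob.size(), blob.data() );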
+
+ struct ShaderModuleValidationCacheCreateInfoEXT
+ {
+ ShaderModuleValidationCacheCreateInfoEXT( ValidationCacheEXT validationCache_ = ValidationCacheEXT() )
+ : validationCache( validationCache_ )
+ {
+ }
+
+ ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
+ }
+
+ ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
+ return *this;
+ }
+ ShaderModuleValidationCacheCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ShaderModuleValidationCacheCreateInfoEXT& setValidationCache( ValidationCacheEXT validationCache_ )
+ {
+ validationCache = validationCache_;
+ return *this;
+ }
+
+ operator const VkShaderModuleValidationCacheCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>(this);
+ }
+
+ bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( validationCache == rhs.validationCache );
+ }
+
+ bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ ValidationCacheEXT validationCache;
+ };
+ static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceMaintenance3Properties
+ {
+ operator const VkPhysicalDeviceMaintenance3Properties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+ && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+ }
+
+ bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxPerSetDescriptors;
+ DeviceSize maxMemoryAllocationSize;
+ };
+ static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
+ struct DescriptorSetLayoutSupport
+ {
+ operator const VkDescriptorSetLayoutSupport&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>(this);
+ }
+
+ bool operator==( DescriptorSetLayoutSupport const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( supported == rhs.supported );
+ }
+
+ bool operator!=( DescriptorSetLayoutSupport const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 supported;
+ };
+ static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+
+ using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
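+
+ // Editorial sketch (not part of the generated header): DescriptorSetLayoutSupport is the
+ // output of the maintenance3 "can this layout be created?" query. Assuming
+ // `layoutCreateInfo` is a populated vk::DescriptorSetLayoutCreateInfo:
+ //
+ //   vk::DescriptorSetLayoutSupport support;
+ //   device.getDescriptorSetLayoutSupport( &layoutCreateInfo, &support );
+ //   bool creatable = ( support.supported == VK_TRUE );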
+
+ struct PhysicalDeviceShaderDrawParameterFeatures
+ {
+ PhysicalDeviceShaderDrawParameterFeatures( Bool32 shaderDrawParameters_ = 0 )
+ : shaderDrawParameters( shaderDrawParameters_ )
+ {
+ }
+
+ PhysicalDeviceShaderDrawParameterFeatures( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) );
+ }
+
+ PhysicalDeviceShaderDrawParameterFeatures& operator=( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) );
+ return *this;
+ }
+ PhysicalDeviceShaderDrawParameterFeatures& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderDrawParameterFeatures& setShaderDrawParameters( Bool32 shaderDrawParameters_ )
+ {
+ shaderDrawParameters = shaderDrawParameters_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceShaderDrawParameterFeatures&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParameterFeatures*>(this);
+ }
+
+ bool operator==( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( shaderDrawParameters == rhs.shaderDrawParameters );
+ }
+
+ bool operator!=( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceShaderDrawParameterFeatures;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 shaderDrawParameters;
+ };
+ static_assert( sizeof( PhysicalDeviceShaderDrawParameterFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParameterFeatures ), "struct and wrapper have different size!" );
+
+ struct DebugUtilsLabelEXT
+ {
+ DebugUtilsLabelEXT( const char* pLabelName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
+ : pLabelName( pLabelName_ )
+ {
+ memcpy( &color, color_.data(), 4 * sizeof( float ) );
+ }
+
+ DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
+ }
+
+ DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
+ return *this;
+ }
+ DebugUtilsLabelEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugUtilsLabelEXT& setPLabelName( const char* pLabelName_ )
+ {
+ pLabelName = pLabelName_;
+ return *this;
+ }
+
+ DebugUtilsLabelEXT& setColor( std::array<float,4> color_ )
+ {
+ memcpy( &color, color_.data(), 4 * sizeof( float ) );
+ return *this;
+ }
+
+ operator const VkDebugUtilsLabelEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugUtilsLabelEXT*>(this);
+ }
+
+ bool operator==( DebugUtilsLabelEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pLabelName == rhs.pLabelName )
+ && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+ }
+
+ bool operator!=( DebugUtilsLabelEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugUtilsLabelEXT;
+
+ public:
+ const void* pNext = nullptr;
+ const char* pLabelName;
+ float color[4];
+ };
+ static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
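+
+ // Editorial sketch (not part of the generated header): DebugUtilsLabelEXT carries a name
+ // and an RGBA color for VK_EXT_debug_utils label regions; the constructed label is passed
+ // to vkCmdBeginDebugUtilsLabelEXT / vkQueueBeginDebugUtilsLabelEXT.
+ //
+ //   vk::DebugUtilsLabelEXT label( "shadow pass", { { 0.2f, 0.2f, 0.8f, 1.0f } } );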
+
+ struct MemoryHostPointerPropertiesEXT
+ {
+ MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 )
+ : memoryTypeBits( memoryTypeBits_ )
+ {
+ }
+
+ MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
+ }
+
+ MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
+ return *this;
+ }
+ MemoryHostPointerPropertiesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryHostPointerPropertiesEXT& setMemoryTypeBits( uint32_t memoryTypeBits_ )
+ {
+ memoryTypeBits = memoryTypeBits_;
+ return *this;
+ }
+
+ operator const VkMemoryHostPointerPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>(this);
+ }
+
+ bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t memoryTypeBits;
+ };
+ static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+ {
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = 0 )
+ : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+ {
+ }
+
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
+ }
+
+ PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceExternalMemoryHostPropertiesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalMemoryHostPropertiesEXT& setMinImportedHostPointerAlignment( DeviceSize minImportedHostPointerAlignment_ )
+ {
+ minImportedHostPointerAlignment = minImportedHostPointerAlignment_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceExternalMemoryHostPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+ }
+
+ bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ DeviceSize minImportedHostPointerAlignment;
+ };
+ static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceConservativeRasterizationPropertiesEXT
+ {
+ PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0, float maxExtraPrimitiveOverestimationSize_ = 0, float extraPrimitiveOverestimationSizeGranularity_ = 0, Bool32 primitiveUnderestimation_ = 0, Bool32 conservativePointAndLineRasterization_ = 0, Bool32 degenerateTrianglesRasterized_ = 0, Bool32 degenerateLinesRasterized_ = 0, Bool32 fullyCoveredFragmentShaderInputVariable_ = 0, Bool32 conservativeRasterizationPostDepthCoverage_ = 0 )
+ : primitiveOverestimationSize( primitiveOverestimationSize_ )
+ , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
+ , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
+ , primitiveUnderestimation( primitiveUnderestimation_ )
+ , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
+ , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
+ , degenerateLinesRasterized( degenerateLinesRasterized_ )
+ , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
+ , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+ {
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveOverestimationSize( float primitiveOverestimationSize_ )
+ {
+ primitiveOverestimationSize = primitiveOverestimationSize_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setMaxExtraPrimitiveOverestimationSize( float maxExtraPrimitiveOverestimationSize_ )
+ {
+ maxExtraPrimitiveOverestimationSize = maxExtraPrimitiveOverestimationSize_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setExtraPrimitiveOverestimationSizeGranularity( float extraPrimitiveOverestimationSizeGranularity_ )
+ {
+ extraPrimitiveOverestimationSizeGranularity = extraPrimitiveOverestimationSizeGranularity_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveUnderestimation( Bool32 primitiveUnderestimation_ )
+ {
+ primitiveUnderestimation = primitiveUnderestimation_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativePointAndLineRasterization( Bool32 conservativePointAndLineRasterization_ )
+ {
+ conservativePointAndLineRasterization = conservativePointAndLineRasterization_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateTrianglesRasterized( Bool32 degenerateTrianglesRasterized_ )
+ {
+ degenerateTrianglesRasterized = degenerateTrianglesRasterized_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateLinesRasterized( Bool32 degenerateLinesRasterized_ )
+ {
+ degenerateLinesRasterized = degenerateLinesRasterized_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setFullyCoveredFragmentShaderInputVariable( Bool32 fullyCoveredFragmentShaderInputVariable_ )
+ {
+ fullyCoveredFragmentShaderInputVariable = fullyCoveredFragmentShaderInputVariable_;
+ return *this;
+ }
+
+ PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativeRasterizationPostDepthCoverage( Bool32 conservativeRasterizationPostDepthCoverage_ )
+ {
+ conservativeRasterizationPostDepthCoverage = conservativeRasterizationPostDepthCoverage_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceConservativeRasterizationPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
+ && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
+ && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
+ && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
+ && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
+ && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
+ && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
+ && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
+ && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+ }
+
+ bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ float primitiveOverestimationSize;
+ float maxExtraPrimitiveOverestimationSize;
+ float extraPrimitiveOverestimationSizeGranularity;
+ Bool32 primitiveUnderestimation;
+ Bool32 conservativePointAndLineRasterization;
+ Bool32 degenerateTrianglesRasterized;
+ Bool32 degenerateLinesRasterized;
+ Bool32 fullyCoveredFragmentShaderInputVariable;
+ Bool32 conservativeRasterizationPostDepthCoverage;
+ };
+ static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceShaderCorePropertiesAMD
+ {
+ operator const VkPhysicalDeviceShaderCorePropertiesAMD&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>(this);
+ }
+
+ bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( shaderEngineCount == rhs.shaderEngineCount )
+ && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
+ && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
+ && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
+ && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
+ && ( wavefrontSize == rhs.wavefrontSize )
+ && ( sgprsPerSimd == rhs.sgprsPerSimd )
+ && ( minSgprAllocation == rhs.minSgprAllocation )
+ && ( maxSgprAllocation == rhs.maxSgprAllocation )
+ && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
+ && ( vgprsPerSimd == rhs.vgprsPerSimd )
+ && ( minVgprAllocation == rhs.minVgprAllocation )
+ && ( maxVgprAllocation == rhs.maxVgprAllocation )
+ && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
+ }
+
+ bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t shaderEngineCount;
+ uint32_t shaderArraysPerEngineCount;
+ uint32_t computeUnitsPerShaderArray;
+ uint32_t simdPerComputeUnit;
+ uint32_t wavefrontsPerSimd;
+ uint32_t wavefrontSize;
+ uint32_t sgprsPerSimd;
+ uint32_t minSgprAllocation;
+ uint32_t maxSgprAllocation;
+ uint32_t sgprAllocationGranularity;
+ uint32_t vgprsPerSimd;
+ uint32_t minVgprAllocation;
+ uint32_t maxVgprAllocation;
+ uint32_t vgprAllocationGranularity;
+ };
+ static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
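+
+ // Editorial sketch (not part of the generated header): properties structs like
+ // PhysicalDeviceShaderCorePropertiesAMD are read back by chaining them onto
+ // PhysicalDeviceProperties2. `physicalDevice` is an assumed, valid vk::PhysicalDevice.
+ //
+ //   vk::PhysicalDeviceShaderCorePropertiesAMD shaderCore;
+ //   vk::PhysicalDeviceProperties2 props2;
+ //   props2.pNext = &shaderCore;                 // returned-only struct, pNext is public
+ //   physicalDevice.getProperties2( &props2 );
+ //   uint32_t waveSize = shaderCore.wavefrontSize;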
+
+ struct PhysicalDeviceDescriptorIndexingFeaturesEXT
+ {
+ PhysicalDeviceDescriptorIndexingFeaturesEXT( Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0, Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0, Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0, Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0, Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0, Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0, Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0, Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0, Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0, Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0, Bool32 descriptorBindingPartiallyBound_ = 0, Bool32 descriptorBindingVariableDescriptorCount_ = 0, Bool32 runtimeDescriptorArray_ = 0 )
+ : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+ , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+ , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+ , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+ , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+ , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+ , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+ , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+ , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+ , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+ , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+ , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+ , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+ , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+ , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+ , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+ , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+ , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+ , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+ , runtimeDescriptorArray( runtimeDescriptorArray_ )
+ {
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) );
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ )
+ {
+ shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ )
+ {
+ shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ )
+ {
+ shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ )
+ {
+ shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ )
+ {
+ shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ )
+ {
+ shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ )
+ {
+ shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ )
+ {
+ shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ )
+ {
+ shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ )
+ {
+ shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ )
+ {
+ descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ )
+ {
+ descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ )
+ {
+ descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ )
+ {
+ descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+ {
+ descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+ {
+ descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ )
+ {
+ descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ )
+ {
+ descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ )
+ {
+ descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeaturesEXT& setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ )
+ {
+ runtimeDescriptorArray = runtimeDescriptorArray_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceDescriptorIndexingFeaturesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+ && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+ && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+ && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+ && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+ && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+ && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+ && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+ && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+ && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+ && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+ && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+ && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+ && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+ && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+ && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+ && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+ && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+ && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+ && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+ }
+
+ bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
+
+ public:
+ void* pNext = nullptr;
+ Bool32 shaderInputAttachmentArrayDynamicIndexing;
+ Bool32 shaderUniformTexelBufferArrayDynamicIndexing;
+ Bool32 shaderStorageTexelBufferArrayDynamicIndexing;
+ Bool32 shaderUniformBufferArrayNonUniformIndexing;
+ Bool32 shaderSampledImageArrayNonUniformIndexing;
+ Bool32 shaderStorageBufferArrayNonUniformIndexing;
+ Bool32 shaderStorageImageArrayNonUniformIndexing;
+ Bool32 shaderInputAttachmentArrayNonUniformIndexing;
+ Bool32 shaderUniformTexelBufferArrayNonUniformIndexing;
+ Bool32 shaderStorageTexelBufferArrayNonUniformIndexing;
+ Bool32 descriptorBindingUniformBufferUpdateAfterBind;
+ Bool32 descriptorBindingSampledImageUpdateAfterBind;
+ Bool32 descriptorBindingStorageImageUpdateAfterBind;
+ Bool32 descriptorBindingStorageBufferUpdateAfterBind;
+ Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
+ Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
+ Bool32 descriptorBindingUpdateUnusedWhilePending;
+ Bool32 descriptorBindingPartiallyBound;
+ Bool32 descriptorBindingVariableDescriptorCount;
+ Bool32 runtimeDescriptorArray;
+ };
+ static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
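+
+ // A possible usage sketch (illustrative; variable names are hypothetical and the
+ // VK_EXT_descriptor_indexing extension must be enabled on the device): the features
+ // struct is chained into DeviceCreateInfo::pNext when creating the logical device.
+ //
+ //   vk::PhysicalDeviceDescriptorIndexingFeaturesEXT indexingFeatures;
+ //   indexingFeatures.setRuntimeDescriptorArray( VK_TRUE )
+ //                   .setDescriptorBindingPartiallyBound( VK_TRUE );
+ //   vk::DeviceCreateInfo deviceCreateInfo;        // queue and extension setup omitted
+ //   deviceCreateInfo.setPNext( &indexingFeatures );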
+
+ struct PhysicalDeviceDescriptorIndexingPropertiesEXT
+ {
+ operator const VkPhysicalDeviceDescriptorIndexingPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+ && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+ && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+ && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+ && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+ && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+ && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+ && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+ && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+ && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+ && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+ && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+ && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+ && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+ && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+ && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+ && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+ && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+ && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+ && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+ && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+ && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+ && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
+ }
+
+ bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools;
+ Bool32 shaderUniformBufferArrayNonUniformIndexingNative;
+ Bool32 shaderSampledImageArrayNonUniformIndexingNative;
+ Bool32 shaderStorageBufferArrayNonUniformIndexingNative;
+ Bool32 shaderStorageImageArrayNonUniformIndexingNative;
+ Bool32 shaderInputAttachmentArrayNonUniformIndexingNative;
+ Bool32 robustBufferAccessUpdateAfterBind;
+ Bool32 quadDivergentImplicitLod;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
+ uint32_t maxPerStageUpdateAfterBindResources;
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
+ };
+ static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
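+
+ // A possible usage sketch (illustrative; assumes a valid vk::PhysicalDevice
+ // `physicalDevice`): the limits are queried by chaining this struct into
+ // PhysicalDeviceProperties2.
+ //
+ //   vk::PhysicalDeviceDescriptorIndexingPropertiesEXT indexingProperties;
+ //   vk::PhysicalDeviceProperties2 properties2;
+ //   properties2.pNext = &indexingProperties;
+ //   physicalDevice.getProperties2( &properties2 );
+ //   uint32_t limit = indexingProperties.maxDescriptorSetUpdateAfterBindSampledImages;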
+
+ struct DescriptorSetVariableDescriptorCountAllocateInfoEXT
+ {
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0, const uint32_t* pDescriptorCounts_ = nullptr )
+ : descriptorSetCount( descriptorSetCount_ )
+ , pDescriptorCounts( pDescriptorCounts_ )
+ {
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) );
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) );
+ return *this;
+ }
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT& setDescriptorSetCount( uint32_t descriptorSetCount_ )
+ {
+ descriptorSetCount = descriptorSetCount_;
+ return *this;
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPDescriptorCounts( const uint32_t* pDescriptorCounts_ )
+ {
+ pDescriptorCounts = pDescriptorCounts_;
+ return *this;
+ }
+
+ operator const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>(this);
+ }
+
+ bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( descriptorSetCount == rhs.descriptorSetCount )
+ && ( pDescriptorCounts == rhs.pDescriptorCounts );
+ }
+
+ bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t descriptorSetCount;
+ const uint32_t* pDescriptorCounts;
+ };
+ static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" );
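+
+ // A possible usage sketch (illustrative; pool and layout setup omitted): the variable
+ // descriptor counts are supplied by chaining this struct into DescriptorSetAllocateInfo,
+ // one count per descriptor set being allocated.
+ //
+ //   uint32_t variableCounts[] = { 64 };
+ //   vk::DescriptorSetVariableDescriptorCountAllocateInfoEXT variableCountInfo( 1, variableCounts );
+ //   vk::DescriptorSetAllocateInfo allocateInfo;
+ //   allocateInfo.setPNext( &variableCountInfo );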
+
+ struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
+ {
+ operator const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this);
+ }
+
+ bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+ }
+
+ bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxVariableDescriptorCount;
+ };
+ static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
+
+ struct PipelineVertexInputDivisorStateCreateInfoEXT
+ {
+ PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0, const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr )
+ : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
+ , pVertexBindingDivisors( pVertexBindingDivisors_ )
+ {
+ }
+
+ PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
+ }
+
+ PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
+ return *this;
+ }
+ PipelineVertexInputDivisorStateCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineVertexInputDivisorStateCreateInfoEXT& setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ )
+ {
+ vertexBindingDivisorCount = vertexBindingDivisorCount_;
+ return *this;
+ }
+
+ PipelineVertexInputDivisorStateCreateInfoEXT& setPVertexBindingDivisors( const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ )
+ {
+ pVertexBindingDivisors = pVertexBindingDivisors_;
+ return *this;
+ }
+
+ operator const VkPipelineVertexInputDivisorStateCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>(this);
+ }
+
+ bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
+ && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
+ }
+
+ bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t vertexBindingDivisorCount;
+ const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
+ };
+ static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
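+
+ // A possible usage sketch (illustrative; bindings and attributes omitted): the divisor
+ // state is chained into PipelineVertexInputStateCreateInfo, here pairing binding 1 with
+ // an instance divisor of 4.
+ //
+ //   vk::VertexInputBindingDivisorDescriptionEXT divisor( 1, 4 );
+ //   vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( 1, &divisor );
+ //   vk::PipelineVertexInputStateCreateInfo vertexInputState;
+ //   vertexInputState.setPNext( &divisorState );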
+
+ struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+ {
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 )
+ : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+ {
+ }
+
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
+ }
+
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
+ return *this;
+ }
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setMaxVertexAttribDivisor( uint32_t maxVertexAttribDivisor_ )
+ {
+ maxVertexAttribDivisor = maxVertexAttribDivisor_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+ }
+
+ bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t maxVertexAttribDivisor;
+ };
+ static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct ImportAndroidHardwareBufferInfoANDROID
+ {
+ ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr )
+ : buffer( buffer_ )
+ {
+ }
+
+ ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
+ }
+
+ ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
+ return *this;
+ }
+ ImportAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportAndroidHardwareBufferInfoANDROID& setBuffer( struct AHardwareBuffer* buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ operator const VkImportAndroidHardwareBufferInfoANDROID&() const
+ {
+ return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>(this);
+ }
+
+ bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( buffer == rhs.buffer );
+ }
+
+ bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+
+ public:
+ const void* pNext = nullptr;
+ struct AHardwareBuffer* buffer;
+ };
+ static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct AndroidHardwareBufferUsageANDROID
+ {
+ operator const VkAndroidHardwareBufferUsageANDROID&() const
+ {
+ return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>(this);
+ }
+
+ bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
+ }
+
+ bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
+
+ public:
+ void* pNext = nullptr;
+ uint64_t androidHardwareBufferUsage;
+ };
+ static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct AndroidHardwareBufferPropertiesANDROID
+ {
+ operator const VkAndroidHardwareBufferPropertiesANDROID&() const
+ {
+ return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>(this);
+ }
+
+ bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( allocationSize == rhs.allocationSize )
+ && ( memoryTypeBits == rhs.memoryTypeBits );
+ }
+
+ bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+
+ public:
+ void* pNext = nullptr;
+ DeviceSize allocationSize;
+ uint32_t memoryTypeBits;
+ };
+ static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct MemoryGetAndroidHardwareBufferInfoANDROID
+ {
+ MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = DeviceMemory() )
+ : memory( memory_ )
+ {
+ }
+
+ MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
+ }
+
+ MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
+ return *this;
+ }
+ MemoryGetAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryGetAndroidHardwareBufferInfoANDROID& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ operator const VkMemoryGetAndroidHardwareBufferInfoANDROID&() const
+ {
+ return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this);
+ }
+
+ bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory );
+ }
+
+ bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceMemory memory;
+ };
+ static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct ExternalFormatANDROID
+ {
+ ExternalFormatANDROID( uint64_t externalFormat_ = 0 )
+ : externalFormat( externalFormat_ )
+ {
+ }
+
+ ExternalFormatANDROID( VkExternalFormatANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) );
+ }
+
+ ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) );
+ return *this;
+ }
+ ExternalFormatANDROID& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExternalFormatANDROID& setExternalFormat( uint64_t externalFormat_ )
+ {
+ externalFormat = externalFormat_;
+ return *this;
+ }
+
+ operator const VkExternalFormatANDROID&() const
+ {
+ return *reinterpret_cast<const VkExternalFormatANDROID*>(this);
+ }
+
+ bool operator==( ExternalFormatANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( externalFormat == rhs.externalFormat );
+ }
+
+ bool operator!=( ExternalFormatANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalFormatANDROID;
+
+ public:
+ void* pNext = nullptr;
+ uint64_t externalFormat;
+ };
+ static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ enum class SubpassContents
+ {
+ eInline = VK_SUBPASS_CONTENTS_INLINE,
+ eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+ };
+
+ struct PresentInfoKHR
+ {
+ PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , swapchainCount( swapchainCount_ )
+ , pSwapchains( pSwapchains_ )
+ , pImageIndices( pImageIndices_ )
+ , pResults( pResults_ )
+ {
+ }
+
+ PresentInfoKHR( VkPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
+ }
+
+ PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
+ return *this;
+ }
+ PresentInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ {
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+ {
+ pWaitSemaphores = pWaitSemaphores_;
+ return *this;
+ }
+
+ PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+ {
+ pSwapchains = pSwapchains_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+ {
+ pImageIndices = pImageIndices_;
+ return *this;
+ }
+
+ PresentInfoKHR& setPResults( Result* pResults_ )
+ {
+ pResults = pResults_;
+ return *this;
+ }
+
+ operator const VkPresentInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+ }
+
+ bool operator==( PresentInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pSwapchains == rhs.pSwapchains )
+ && ( pImageIndices == rhs.pImageIndices )
+ && ( pResults == rhs.pResults );
+ }
+
+ bool operator!=( PresentInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePresentInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t waitSemaphoreCount;
+ const Semaphore* pWaitSemaphores;
+ uint32_t swapchainCount;
+ const SwapchainKHR* pSwapchains;
+ const uint32_t* pImageIndices;
+ Result* pResults;
+ };
+ static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
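+
+ // A possible usage sketch (illustrative; assumes a valid vk::Queue `presentQueue`, a
+ // swapchain, an acquired `imageIndex` and a `renderFinished` semaphore):
+ //
+ //   vk::PresentInfoKHR presentInfo( 1, &renderFinished, 1, &swapchain, &imageIndex );
+ //   presentQueue.presentKHR( presentInfo );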
+
+ enum class DynamicState
+ {
+ eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+ eScissor = VK_DYNAMIC_STATE_SCISSOR,
+ eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+ eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+ eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+ eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+ eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+ eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+ eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+ eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+ eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+ eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT
+ };
+
+ struct PipelineDynamicStateCreateInfo
+ {
+ PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
+ : flags( flags_ )
+ , dynamicStateCount( dynamicStateCount_ )
+ , pDynamicStates( pDynamicStates_ )
+ {
+ }
+
+ PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
+ }
+
+ PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
+ return *this;
+ }
+ PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+ {
+ dynamicStateCount = dynamicStateCount_;
+ return *this;
+ }
+
+ PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+ {
+ pDynamicStates = pDynamicStates_;
+ return *this;
+ }
+
+ operator const VkPipelineDynamicStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( dynamicStateCount == rhs.dynamicStateCount )
+ && ( pDynamicStates == rhs.pDynamicStates );
+ }
+
+ bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineDynamicStateCreateFlags flags;
+ uint32_t dynamicStateCount;
+ const DynamicState* pDynamicStates;
+ };
+ static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
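+
+ // A possible usage sketch (illustrative): states listed here are ignored at pipeline
+ // creation time and must instead be set while recording, e.g. with
+ // CommandBuffer::setViewport and CommandBuffer::setScissor.
+ //
+ //   vk::DynamicState dynamicStates[] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
+ //   vk::PipelineDynamicStateCreateInfo dynamicState( {}, 2, dynamicStates );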
+
+ enum class DescriptorUpdateTemplateType
+ {
+ eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+ eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+ ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
+ };
+
+ struct DescriptorUpdateTemplateCreateInfo
+ {
+ DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_ = DescriptorUpdateTemplateCreateFlags(), uint32_t descriptorUpdateEntryCount_ = 0, const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr, DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet, DescriptorSetLayout descriptorSetLayout_ = DescriptorSetLayout(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, PipelineLayout pipelineLayout_ = PipelineLayout(), uint32_t set_ = 0 )
+ : flags( flags_ )
+ , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
+ , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
+ , templateType( templateType_ )
+ , descriptorSetLayout( descriptorSetLayout_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , pipelineLayout( pipelineLayout_ )
+ , set( set_ )
+ {
+ }
+
+ DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
+ }
+
+ DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
+ return *this;
+ }
+ DescriptorUpdateTemplateCreateInfo& setPNext( void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setFlags( DescriptorUpdateTemplateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ )
+ {
+ descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ )
+ {
+ pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setTemplateType( DescriptorUpdateTemplateType templateType_ )
+ {
+ templateType = templateType_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ )
+ {
+ descriptorSetLayout = descriptorSetLayout_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setPipelineLayout( PipelineLayout pipelineLayout_ )
+ {
+ pipelineLayout = pipelineLayout_;
+ return *this;
+ }
+
+ DescriptorUpdateTemplateCreateInfo& setSet( uint32_t set_ )
+ {
+ set = set_;
+ return *this;
+ }
+
+ operator const VkDescriptorUpdateTemplateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>(this);
+ }
+
+ bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+ && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+ && ( templateType == rhs.templateType )
+ && ( descriptorSetLayout == rhs.descriptorSetLayout )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( pipelineLayout == rhs.pipelineLayout )
+ && ( set == rhs.set );
+ }
+
+ bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+
+ public:
+ void* pNext = nullptr;
+ DescriptorUpdateTemplateCreateFlags flags;
+ uint32_t descriptorUpdateEntryCount;
+ const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
+ DescriptorUpdateTemplateType templateType;
+ DescriptorSetLayout descriptorSetLayout;
+ PipelineBindPoint pipelineBindPoint;
+ PipelineLayout pipelineLayout;
+ uint32_t set;
+ };
+ static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+
+ using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
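+
+ // A possible usage sketch (illustrative; assumes a valid vk::Device `device`, a matching
+ // DescriptorUpdateTemplateEntry `entry` and a `descriptorData` blob laid out according to
+ // the entry offsets and strides):
+ //
+ //   vk::DescriptorUpdateTemplateCreateInfo templateInfo( {}, 1, &entry,
+ //       vk::DescriptorUpdateTemplateType::eDescriptorSet, descriptorSetLayout );
+ //   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateInfo );
+ //   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, &descriptorData );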
+
+ enum class ObjectType
+ {
+ eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+ eInstance = VK_OBJECT_TYPE_INSTANCE,
+ ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+ eDevice = VK_OBJECT_TYPE_DEVICE,
+ eQueue = VK_OBJECT_TYPE_QUEUE,
+ eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+ eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+ eFence = VK_OBJECT_TYPE_FENCE,
+ eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+ eBuffer = VK_OBJECT_TYPE_BUFFER,
+ eImage = VK_OBJECT_TYPE_IMAGE,
+ eEvent = VK_OBJECT_TYPE_EVENT,
+ eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+ eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+ eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+ eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+ ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+ ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+ eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+ ePipeline = VK_OBJECT_TYPE_PIPELINE,
+ eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+ eSampler = VK_OBJECT_TYPE_SAMPLER,
+ eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+ eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+ eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+ eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+ eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+ eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+ eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+ eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+ eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+ eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+ eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+ eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+ eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+ eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX,
+ eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,
+ eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+ eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT
+ };
+
+ struct DebugUtilsObjectNameInfoEXT
+ {
+ DebugUtilsObjectNameInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = 0, const char* pObjectName_ = nullptr )
+ : objectType( objectType_ )
+ , objectHandle( objectHandle_ )
+ , pObjectName( pObjectName_ )
+ {
+ }
+
+ DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
+ }
+
+ DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
+ return *this;
+ }
+ DebugUtilsObjectNameInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugUtilsObjectNameInfoEXT& setObjectType( ObjectType objectType_ )
+ {
+ objectType = objectType_;
+ return *this;
+ }
+
+ DebugUtilsObjectNameInfoEXT& setObjectHandle( uint64_t objectHandle_ )
+ {
+ objectHandle = objectHandle_;
+ return *this;
+ }
+
+ DebugUtilsObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+ {
+ pObjectName = pObjectName_;
+ return *this;
+ }
+
+ operator const VkDebugUtilsObjectNameInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>(this);
+ }
+
+ bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectType == rhs.objectType )
+ && ( objectHandle == rhs.objectHandle )
+ && ( pObjectName == rhs.pObjectName );
+ }
+
+ bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ ObjectType objectType;
+ uint64_t objectHandle;
+ const char* pObjectName;
+ };
+ static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
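+
+ // A possible usage sketch (illustrative; requires VK_EXT_debug_utils): an object is named
+ // by passing its raw handle as uint64_t together with its ObjectType.
+ //
+ //   vk::DebugUtilsObjectNameInfoEXT nameInfo( vk::ObjectType::eBuffer,
+ //       uint64_t( static_cast<VkBuffer>( vertexBuffer ) ), "vertex buffer" );
+ //   device.setDebugUtilsObjectNameEXT( nameInfo );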
+
+ struct DebugUtilsObjectTagInfoEXT
+ {
+ DebugUtilsObjectTagInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
+ : objectType( objectType_ )
+ , objectHandle( objectHandle_ )
+ , tagName( tagName_ )
+ , tagSize( tagSize_ )
+ , pTag( pTag_ )
+ {
+ }
+
+ DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
+ }
+
+ DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
+ return *this;
+ }
+ DebugUtilsObjectTagInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT& setObjectType( ObjectType objectType_ )
+ {
+ objectType = objectType_;
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT& setObjectHandle( uint64_t objectHandle_ )
+ {
+ objectHandle = objectHandle_;
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+ {
+ tagName = tagName_;
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+ {
+ tagSize = tagSize_;
+ return *this;
+ }
+
+ DebugUtilsObjectTagInfoEXT& setPTag( const void* pTag_ )
+ {
+ pTag = pTag_;
+ return *this;
+ }
+
+ operator const VkDebugUtilsObjectTagInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>(this);
+ }
+
+ bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectType == rhs.objectType )
+ && ( objectHandle == rhs.objectHandle )
+ && ( tagName == rhs.tagName )
+ && ( tagSize == rhs.tagSize )
+ && ( pTag == rhs.pTag );
+ }
+
+ bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ ObjectType objectType;
+ uint64_t objectHandle;
+ uint64_t tagName;
+ size_t tagSize;
+ const void* pTag;
+ };
+ static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+
+ struct DebugUtilsMessengerCallbackDataEXT
+ {
+ DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_ = DebugUtilsMessengerCallbackDataFlagsEXT(), const char* pMessageIdName_ = nullptr, int32_t messageIdNumber_ = 0, const char* pMessage_ = nullptr, uint32_t queueLabelCount_ = 0, DebugUtilsLabelEXT* pQueueLabels_ = nullptr, uint32_t cmdBufLabelCount_ = 0, DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr, uint32_t objectCount_ = 0, DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr )
+ : flags( flags_ )
+ , pMessageIdName( pMessageIdName_ )
+ , messageIdNumber( messageIdNumber_ )
+ , pMessage( pMessage_ )
+ , queueLabelCount( queueLabelCount_ )
+ , pQueueLabels( pQueueLabels_ )
+ , cmdBufLabelCount( cmdBufLabelCount_ )
+ , pCmdBufLabels( pCmdBufLabels_ )
+ , objectCount( objectCount_ )
+ , pObjects( pObjects_ )
+ {
+ }
+
+ DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
+ return *this;
+ }
+ DebugUtilsMessengerCallbackDataEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setPMessageIdName( const char* pMessageIdName_ )
+ {
+ pMessageIdName = pMessageIdName_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setMessageIdNumber( int32_t messageIdNumber_ )
+ {
+ messageIdNumber = messageIdNumber_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setPMessage( const char* pMessage_ )
+ {
+ pMessage = pMessage_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setQueueLabelCount( uint32_t queueLabelCount_ )
+ {
+ queueLabelCount = queueLabelCount_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setPQueueLabels( DebugUtilsLabelEXT* pQueueLabels_ )
+ {
+ pQueueLabels = pQueueLabels_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setCmdBufLabelCount( uint32_t cmdBufLabelCount_ )
+ {
+ cmdBufLabelCount = cmdBufLabelCount_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setPCmdBufLabels( DebugUtilsLabelEXT* pCmdBufLabels_ )
+ {
+ pCmdBufLabels = pCmdBufLabels_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setObjectCount( uint32_t objectCount_ )
+ {
+ objectCount = objectCount_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCallbackDataEXT& setPObjects( DebugUtilsObjectNameInfoEXT* pObjects_ )
+ {
+ pObjects = pObjects_;
+ return *this;
+ }
+
+ operator const VkDebugUtilsMessengerCallbackDataEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>(this);
+ }
+
+ bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pMessageIdName == rhs.pMessageIdName )
+ && ( messageIdNumber == rhs.messageIdNumber )
+ && ( pMessage == rhs.pMessage )
+ && ( queueLabelCount == rhs.queueLabelCount )
+ && ( pQueueLabels == rhs.pQueueLabels )
+ && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+ && ( pCmdBufLabels == rhs.pCmdBufLabels )
+ && ( objectCount == rhs.objectCount )
+ && ( pObjects == rhs.pObjects );
+ }
+
+ bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DebugUtilsMessengerCallbackDataFlagsEXT flags;
+ const char* pMessageIdName;
+ int32_t messageIdNumber;
+ const char* pMessage;
+ uint32_t queueLabelCount;
+ DebugUtilsLabelEXT* pQueueLabels;
+ uint32_t cmdBufLabelCount;
+ DebugUtilsLabelEXT* pCmdBufLabels;
+ uint32_t objectCount;
+ DebugUtilsObjectNameInfoEXT* pObjects;
+ };
+ static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
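+
+ // Note (illustrative): this struct is filled in by the implementation and handed to the
+ // application's PFN_vkDebugUtilsMessengerCallbackEXT; a minimal callback might look like:
+ //
+ //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
+ //       VkDebugUtilsMessageTypeFlagsEXT, const VkDebugUtilsMessengerCallbackDataEXT* pData, void* )
+ //   {
+ //     std::cerr << pData->pMessage << std::endl;
+ //     return VK_FALSE;   // do not abort the call that triggered the message
+ //   }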
+
+ enum class QueueFlagBits
+ {
+ eGraphics = VK_QUEUE_GRAPHICS_BIT,
+ eCompute = VK_QUEUE_COMPUTE_BIT,
+ eTransfer = VK_QUEUE_TRANSFER_BIT,
+ eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+ eProtected = VK_QUEUE_PROTECTED_BIT
+ };
+
+ using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+ VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
+ {
+ return QueueFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
+ {
+ return ~( QueueFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueueFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected)
+ };
+ };
+
+ struct QueueFamilyProperties
+ {
+ operator const VkQueueFamilyProperties&() const
+ {
+ return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
+ }
+
+ bool operator==( QueueFamilyProperties const& rhs ) const
+ {
+ return ( queueFlags == rhs.queueFlags )
+ && ( queueCount == rhs.queueCount )
+ && ( timestampValidBits == rhs.timestampValidBits )
+ && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+ }
+
+ bool operator!=( QueueFamilyProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ QueueFlags queueFlags;
+ uint32_t queueCount;
+ uint32_t timestampValidBits;
+ Extent3D minImageTransferGranularity;
+ };
+ static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
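+
+ // A possible usage sketch (illustrative): queue families are typically scanned for a
+ // required capability before device creation.
+ //
+ //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
+ //   uint32_t graphicsFamily = 0;
+ //   for ( uint32_t i = 0; i < families.size(); i++ )
+ //   {
+ //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics ) { graphicsFamily = i; break; }
+ //   }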
+
+ struct QueueFamilyProperties2
+ {
+ operator const VkQueueFamilyProperties2&() const
+ {
+ return *reinterpret_cast<const VkQueueFamilyProperties2*>(this);
+ }
+
+ bool operator==( QueueFamilyProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( queueFamilyProperties == rhs.queueFamilyProperties );
+ }
+
+ bool operator!=( QueueFamilyProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eQueueFamilyProperties2;
+
+ public:
+ void* pNext = nullptr;
+ QueueFamilyProperties queueFamilyProperties;
+ };
+ static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+
+ using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+
+ enum class DeviceQueueCreateFlagBits
+ {
+ eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+ };
+
+ using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
+
+ VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
+ {
+ return DeviceQueueCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits )
+ {
+ return ~( DeviceQueueCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
+ };
+ };
+
+ struct DeviceQueueCreateInfo
+ {
+ DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr )
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
+ , queueCount( queueCount_ )
+ , pQueuePriorities( pQueuePriorities_ )
+ {
+ }
+
+ DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
+ }
+
+ DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
+ return *this;
+ }
+ DeviceQueueCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+ {
+ queueFamilyIndex = queueFamilyIndex_;
+ return *this;
+ }
+
+ DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
+ {
+ queueCount = queueCount_;
+ return *this;
+ }
+
+ DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
+ {
+ pQueuePriorities = pQueuePriorities_;
+ return *this;
+ }
+
+ operator const VkDeviceQueueCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
+ }
+
+ bool operator==( DeviceQueueCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queueFamilyIndex == rhs.queueFamilyIndex )
+ && ( queueCount == rhs.queueCount )
+ && ( pQueuePriorities == rhs.pQueuePriorities );
+ }
+
+ bool operator!=( DeviceQueueCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceQueueCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceQueueCreateFlags flags;
+ uint32_t queueFamilyIndex;
+ uint32_t queueCount;
+ const float* pQueuePriorities;
+ };
+ static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+
+ struct DeviceCreateInfo
+ {
+ DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
+ : flags( flags_ )
+ , queueCreateInfoCount( queueCreateInfoCount_ )
+ , pQueueCreateInfos( pQueueCreateInfos_ )
+ , enabledLayerCount( enabledLayerCount_ )
+ , ppEnabledLayerNames( ppEnabledLayerNames_ )
+ , enabledExtensionCount( enabledExtensionCount_ )
+ , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+ , pEnabledFeatures( pEnabledFeatures_ )
+ {
+ }
+
+ DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
+ }
+
+ DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
+ return *this;
+ }
+ DeviceCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
+ {
+ queueCreateInfoCount = queueCreateInfoCount_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
+ {
+ pQueueCreateInfos = pQueueCreateInfos_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
+ {
+ enabledLayerCount = enabledLayerCount_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
+ {
+ ppEnabledLayerNames = ppEnabledLayerNames_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
+ {
+ enabledExtensionCount = enabledExtensionCount_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
+ {
+ ppEnabledExtensionNames = ppEnabledExtensionNames_;
+ return *this;
+ }
+
+ DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
+ {
+ pEnabledFeatures = pEnabledFeatures_;
+ return *this;
+ }
+
+ operator const VkDeviceCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
+ }
+
+ bool operator==( DeviceCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+ && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+ && ( enabledLayerCount == rhs.enabledLayerCount )
+ && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+ && ( enabledExtensionCount == rhs.enabledExtensionCount )
+ && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+ && ( pEnabledFeatures == rhs.pEnabledFeatures );
+ }
+
+ bool operator!=( DeviceCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceCreateFlags flags;
+ uint32_t queueCreateInfoCount;
+ const DeviceQueueCreateInfo* pQueueCreateInfos;
+ uint32_t enabledLayerCount;
+ const char* const* ppEnabledLayerNames;
+ uint32_t enabledExtensionCount;
+ const char* const* ppEnabledExtensionNames;
+ const PhysicalDeviceFeatures* pEnabledFeatures;
+ };
+ static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
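+
+ // A possible usage sketch (illustrative; assumes `physicalDevice` and a chosen
+ // `graphicsFamily` index): a logical device with a single graphics queue.
+ //
+ //   float queuePriority = 1.0f;
+ //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamily, 1, &queuePriority );
+ //   vk::DeviceCreateInfo deviceInfo( {}, 1, &queueInfo );
+ //   vk::Device device = physicalDevice.createDevice( deviceInfo );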
+
+ struct DeviceQueueInfo2
+ {
+ DeviceQueueInfo2( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueIndex_ = 0 )
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
+ , queueIndex( queueIndex_ )
+ {
+ }
+
+ DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) );
+ }
+
+ DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) );
+ return *this;
+ }
+ DeviceQueueInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceQueueInfo2& setFlags( DeviceQueueCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DeviceQueueInfo2& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+ {
+ queueFamilyIndex = queueFamilyIndex_;
+ return *this;
+ }
+
+ DeviceQueueInfo2& setQueueIndex( uint32_t queueIndex_ )
+ {
+ queueIndex = queueIndex_;
+ return *this;
+ }
+
+ operator const VkDeviceQueueInfo2&() const
+ {
+ return *reinterpret_cast<const VkDeviceQueueInfo2*>(this);
+ }
+
+ bool operator==( DeviceQueueInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queueFamilyIndex == rhs.queueFamilyIndex )
+ && ( queueIndex == rhs.queueIndex );
+ }
+
+ bool operator!=( DeviceQueueInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceQueueInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceQueueCreateFlags flags;
+ uint32_t queueFamilyIndex;
+ uint32_t queueIndex;
+ };
+ static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
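+
+ // A possible usage sketch (illustrative, Vulkan 1.1): getQueue2 retrieves a queue that
+ // was created with specific DeviceQueueCreateFlags, e.g. a protected-capable queue.
+ //
+ //   vk::DeviceQueueInfo2 queueInfo2( vk::DeviceQueueCreateFlagBits::eProtected, queueFamilyIndex, 0 );
+ //   vk::Queue protectedQueue = device.getQueue2( queueInfo2 );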
+
+ enum class MemoryPropertyFlagBits
+ {
+ eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+ eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+ eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+ eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+ eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+ eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT
+ };
+
+ using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
+
+ VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
+ {
+ return MemoryPropertyFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
+ {
+ return ~( MemoryPropertyFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<MemoryPropertyFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected)
+ };
+ };
+
+ struct MemoryType
+ {
+ operator const VkMemoryType&() const
+ {
+ return *reinterpret_cast<const VkMemoryType*>(this);
+ }
+
+ bool operator==( MemoryType const& rhs ) const
+ {
+ return ( propertyFlags == rhs.propertyFlags )
+ && ( heapIndex == rhs.heapIndex );
+ }
+
+ bool operator!=( MemoryType const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ MemoryPropertyFlags propertyFlags;
+ uint32_t heapIndex;
+ };
+ static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+
+ enum class MemoryHeapFlagBits
+ {
+ eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+ eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+ eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
+ };
+
+ using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
+
+ VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
+ {
+ return MemoryHeapFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
+ {
+ return ~( MemoryHeapFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<MemoryHeapFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance)
+ };
+ };
+
+ struct MemoryHeap
+ {
+ operator const VkMemoryHeap&() const
+ {
+ return *reinterpret_cast<const VkMemoryHeap*>(this);
+ }
+
+ bool operator==( MemoryHeap const& rhs ) const
+ {
+ return ( size == rhs.size )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( MemoryHeap const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize size;
+ MemoryHeapFlags flags;
+ };
+ static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceMemoryProperties
+ {
+ operator const VkPhysicalDeviceMemoryProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
+ {
+ return ( memoryTypeCount == rhs.memoryTypeCount )
+ && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
+ && ( memoryHeapCount == rhs.memoryHeapCount )
+ && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
+ }
+
+ bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t memoryTypeCount;
+ MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
+ uint32_t memoryHeapCount;
+ MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
+ };
+ static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+
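+  // Usage sketch (illustrative; "physicalDevice" and "typeFilter" are assumed names):
+  // picking a memory type whose propertyFlags satisfy a request. Passing a single
+  // FlagBits value relies on its implicit conversion to the corresponding Flags type.
+  //
+  //   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
+  //   uint32_t memoryTypeIndex = UINT32_MAX;
+  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; i++ )
+  //   {
+  //     if ( ( typeFilter & ( 1u << i ) ) &&
+  //          ( memProps.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
+  //     {
+  //       memoryTypeIndex = i;
+  //       break;
+  //     }
+  //   }
+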
+ struct PhysicalDeviceMemoryProperties2
+ {
+ operator const VkPhysicalDeviceMemoryProperties2&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>(this);
+ }
+
+ bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryProperties == rhs.memoryProperties );
+ }
+
+ bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+ public:
+ void* pNext = nullptr;
+ PhysicalDeviceMemoryProperties memoryProperties;
+ };
+ static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+
+ enum class AccessFlagBits
+ {
+ eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+ eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+ eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+ eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+ eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+ eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+ eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+ eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+ eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+ eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+ eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+ eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+ eHostRead = VK_ACCESS_HOST_READ_BIT,
+ eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+ eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+ eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+ eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+ eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
+ eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT
+ };
+
+ using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
+
+ VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
+ {
+ return AccessFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
+ {
+ return ~( AccessFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<AccessFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT)
+ };
+ };
+
+ struct MemoryBarrier
+ {
+ MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ {
+ }
+
+ MemoryBarrier( VkMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryBarrier ) );
+ }
+
+ MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryBarrier ) );
+ return *this;
+ }
+ MemoryBarrier& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ operator const VkMemoryBarrier&() const
+ {
+ return *reinterpret_cast<const VkMemoryBarrier*>(this);
+ }
+
+ bool operator==( MemoryBarrier const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask );
+ }
+
+ bool operator!=( MemoryBarrier const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryBarrier;
+
+ public:
+ const void* pNext = nullptr;
+ AccessFlags srcAccessMask;
+ AccessFlags dstAccessMask;
+ };
+ static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+
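+  // Usage sketch (illustrative): a global memory barrier that makes transfer writes
+  // visible to later shader reads. Single FlagBits values convert implicitly to Flags,
+  // so they can be passed straight to the constructor.
+  //
+  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
+  //                              vk::AccessFlagBits::eShaderRead );
+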
+ struct BufferMemoryBarrier
+ {
+ BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+ , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ , size( size_ )
+ {
+ }
+
+ BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
+ }
+
+ BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
+ return *this;
+ }
+ BufferMemoryBarrier& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
+ {
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
+ {
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ BufferMemoryBarrier& setSize( DeviceSize size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ operator const VkBufferMemoryBarrier&() const
+ {
+ return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
+ }
+
+ bool operator==( BufferMemoryBarrier const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( BufferMemoryBarrier const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBufferMemoryBarrier;
+
+ public:
+ const void* pNext = nullptr;
+ AccessFlags srcAccessMask;
+ AccessFlags dstAccessMask;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ Buffer buffer;
+ DeviceSize offset;
+ DeviceSize size;
+ };
+ static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+
+ enum class BufferUsageFlagBits
+ {
+ eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+ eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+ eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+ eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+ eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+ eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
+ };
+
+ using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
+
+ VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
+ {
+ return BufferUsageFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
+ {
+ return ~( BufferUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<BufferUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
+ };
+ };
+
+ enum class BufferCreateFlagBits
+ {
+ eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+ eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+ eProtected = VK_BUFFER_CREATE_PROTECTED_BIT
+ };
+
+ using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
+
+ VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
+ {
+ return BufferCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
+ {
+ return ~( BufferCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<BufferCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected)
+ };
+ };
+
+ struct BufferCreateInfo
+ {
+ BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
+ : flags( flags_ )
+ , size( size_ )
+ , usage( usage_ )
+ , sharingMode( sharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ {
+ }
+
+ BufferCreateInfo( VkBufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
+ }
+
+ BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
+ return *this;
+ }
+ BufferCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ BufferCreateInfo& setSize( DeviceSize size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
+ {
+ sharingMode = sharingMode_;
+ return *this;
+ }
+
+ BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+ {
+ queueFamilyIndexCount = queueFamilyIndexCount_;
+ return *this;
+ }
+
+ BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+ {
+ pQueueFamilyIndices = pQueueFamilyIndices_;
+ return *this;
+ }
+
+ operator const VkBufferCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkBufferCreateInfo*>(this);
+ }
+
+ bool operator==( BufferCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( size == rhs.size )
+ && ( usage == rhs.usage )
+ && ( sharingMode == rhs.sharingMode )
+ && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+ && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+ }
+
+ bool operator!=( BufferCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBufferCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ BufferCreateFlags flags;
+ DeviceSize size;
+ BufferUsageFlags usage;
+ SharingMode sharingMode;
+ uint32_t queueFamilyIndexCount;
+ const uint32_t* pQueueFamilyIndices;
+ };
+ static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+
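+  // Usage sketch (illustrative): an exclusive-mode vertex buffer of 64 KiB that can also
+  // serve as a transfer destination; the byte size is an arbitrary example value.
+  //
+  //   vk::BufferCreateInfo bufferInfo( vk::BufferCreateFlags(), 65536,
+  //                                    vk::BufferUsageFlagBits::eVertexBuffer
+  //                                    | vk::BufferUsageFlagBits::eTransferDst );
+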
+ enum class ShaderStageFlagBits
+ {
+ eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+ eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+ eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+ eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+ eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+ eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+ eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+ eAll = VK_SHADER_STAGE_ALL
+ };
+
+ using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
+
+ VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
+ {
+ return ShaderStageFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
+ {
+ return ~( ShaderStageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ShaderStageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
+ };
+ };
+
+ struct DescriptorSetLayoutBinding
+ {
+ DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
+ : binding( binding_ )
+ , descriptorType( descriptorType_ )
+ , descriptorCount( descriptorCount_ )
+ , stageFlags( stageFlags_ )
+ , pImmutableSamplers( pImmutableSamplers_ )
+ {
+ }
+
+ DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
+ }
+
+ DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
+ return *this;
+ }
+ DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
+ {
+ binding = binding_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
+ {
+ descriptorType = descriptorType_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
+ {
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
+ {
+ stageFlags = stageFlags_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
+ {
+ pImmutableSamplers = pImmutableSamplers_;
+ return *this;
+ }
+
+ operator const VkDescriptorSetLayoutBinding&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
+ }
+
+ bool operator==( DescriptorSetLayoutBinding const& rhs ) const
+ {
+ return ( binding == rhs.binding )
+ && ( descriptorType == rhs.descriptorType )
+ && ( descriptorCount == rhs.descriptorCount )
+ && ( stageFlags == rhs.stageFlags )
+ && ( pImmutableSamplers == rhs.pImmutableSamplers );
+ }
+
+ bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t binding;
+ DescriptorType descriptorType;
+ uint32_t descriptorCount;
+ ShaderStageFlags stageFlags;
+ const Sampler* pImmutableSamplers;
+ };
+ static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+
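+  // Usage sketch (illustrative): binding 0 as a single uniform buffer visible to the
+  // vertex stage, with no immutable samplers.
+  //
+  //   vk::DescriptorSetLayoutBinding uboBinding( 0, vk::DescriptorType::eUniformBuffer,
+  //                                              1, vk::ShaderStageFlagBits::eVertex );
+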
+ struct PipelineShaderStageCreateInfo
+ {
+ PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr )
+ : flags( flags_ )
+ , stage( stage_ )
+ , module( module_ )
+ , pName( pName_ )
+ , pSpecializationInfo( pSpecializationInfo_ )
+ {
+ }
+
+ PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
+ }
+
+ PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
+ return *this;
+ }
+ PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
+ {
+ stage = stage_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
+ {
+ module = module_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo& setPName( const char* pName_ )
+ {
+ pName = pName_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
+ {
+ pSpecializationInfo = pSpecializationInfo_;
+ return *this;
+ }
+
+ operator const VkPipelineShaderStageCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( stage == rhs.stage )
+ && ( module == rhs.module )
+ && ( pName == rhs.pName )
+ && ( pSpecializationInfo == rhs.pSpecializationInfo );
+ }
+
+ bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineShaderStageCreateFlags flags;
+ ShaderStageFlagBits stage;
+ ShaderModule module;
+ const char* pName;
+ const SpecializationInfo* pSpecializationInfo;
+ };
+ static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+
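+  // Usage sketch (illustrative): a vertex stage referencing an already created shader
+  // module; "vertexShaderModule" is an assumed handle and "main" is the SPIR-V entry point.
+  //
+  //   vk::PipelineShaderStageCreateInfo vertexStage( vk::PipelineShaderStageCreateFlags(),
+  //                                                  vk::ShaderStageFlagBits::eVertex,
+  //                                                  vertexShaderModule, "main" );
+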
+ struct PushConstantRange
+ {
+ PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 )
+ : stageFlags( stageFlags_ )
+ , offset( offset_ )
+ , size( size_ )
+ {
+ }
+
+ PushConstantRange( VkPushConstantRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PushConstantRange ) );
+ }
+
+ PushConstantRange& operator=( VkPushConstantRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PushConstantRange ) );
+ return *this;
+ }
+ PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
+ {
+ stageFlags = stageFlags_;
+ return *this;
+ }
+
+ PushConstantRange& setOffset( uint32_t offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ PushConstantRange& setSize( uint32_t size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ operator const VkPushConstantRange&() const
+ {
+ return *reinterpret_cast<const VkPushConstantRange*>(this);
+ }
+
+ bool operator==( PushConstantRange const& rhs ) const
+ {
+ return ( stageFlags == rhs.stageFlags )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( PushConstantRange const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ShaderStageFlags stageFlags;
+ uint32_t offset;
+ uint32_t size;
+ };
+ static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+
+ struct PipelineLayoutCreateInfo
+ {
+ PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr )
+ : flags( flags_ )
+ , setLayoutCount( setLayoutCount_ )
+ , pSetLayouts( pSetLayouts_ )
+ , pushConstantRangeCount( pushConstantRangeCount_ )
+ , pPushConstantRanges( pPushConstantRanges_ )
+ {
+ }
+
+ PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
+ }
+
+ PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
+ return *this;
+ }
+ PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
+ {
+ setLayoutCount = setLayoutCount_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
+ {
+ pSetLayouts = pSetLayouts_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
+ {
+ pushConstantRangeCount = pushConstantRangeCount_;
+ return *this;
+ }
+
+ PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
+ {
+ pPushConstantRanges = pPushConstantRanges_;
+ return *this;
+ }
+
+ operator const VkPipelineLayoutCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineLayoutCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( setLayoutCount == rhs.setLayoutCount )
+ && ( pSetLayouts == rhs.pSetLayouts )
+ && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+ && ( pPushConstantRanges == rhs.pPushConstantRanges );
+ }
+
+ bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineLayoutCreateFlags flags;
+ uint32_t setLayoutCount;
+ const DescriptorSetLayout* pSetLayouts;
+ uint32_t pushConstantRangeCount;
+ const PushConstantRange* pPushConstantRanges;
+ };
+ static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+
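+  // Usage sketch (illustrative): a pipeline layout with one descriptor set layout and a
+  // small push constant block for the vertex stage; "descriptorSetLayout" is an assumed handle.
+  //
+  //   vk::PushConstantRange pushRange( vk::ShaderStageFlagBits::eVertex, 0, sizeof( float ) * 16 );
+  //   vk::PipelineLayoutCreateInfo layoutInfo( vk::PipelineLayoutCreateFlags(),
+  //                                            1, &descriptorSetLayout, 1, &pushRange );
+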
+ struct ShaderStatisticsInfoAMD
+ {
+ operator const VkShaderStatisticsInfoAMD&() const
+ {
+ return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>(this);
+ }
+
+ bool operator==( ShaderStatisticsInfoAMD const& rhs ) const
+ {
+ return ( shaderStageMask == rhs.shaderStageMask )
+ && ( resourceUsage == rhs.resourceUsage )
+ && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
+ && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
+ && ( numAvailableVgprs == rhs.numAvailableVgprs )
+ && ( numAvailableSgprs == rhs.numAvailableSgprs )
+ && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
+ }
+
+ bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ShaderStageFlags shaderStageMask;
+ ShaderResourceUsageAMD resourceUsage;
+ uint32_t numPhysicalVgprs;
+ uint32_t numPhysicalSgprs;
+ uint32_t numAvailableVgprs;
+ uint32_t numAvailableSgprs;
+ uint32_t computeWorkGroupSize[3];
+ };
+ static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+
+ enum class ImageUsageFlagBits
+ {
+ eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+ eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+ eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+ eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+ };
+
+ using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
+
+ VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
+ {
+ return ImageUsageFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
+ {
+ return ~( ImageUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
+ };
+ };
+
+ struct SharedPresentSurfaceCapabilitiesKHR
+ {
+ operator const VkSharedPresentSurfaceCapabilitiesKHR&() const
+ {
+ return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>(this);
+ }
+
+ bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+ }
+
+ bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+
+ public:
+ void* pNext = nullptr;
+ ImageUsageFlags sharedPresentSupportedUsageFlags;
+ };
+ static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+ struct ImageViewUsageCreateInfo
+ {
+ ImageViewUsageCreateInfo( ImageUsageFlags usage_ = ImageUsageFlags() )
+ : usage( usage_ )
+ {
+ }
+
+ ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) );
+ }
+
+ ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) );
+ return *this;
+ }
+ ImageViewUsageCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageViewUsageCreateInfo& setUsage( ImageUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ operator const VkImageViewUsageCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkImageViewUsageCreateInfo*>(this);
+ }
+
+ bool operator==( ImageViewUsageCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( usage == rhs.usage );
+ }
+
+ bool operator!=( ImageViewUsageCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageViewUsageCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ImageUsageFlags usage;
+ };
+ static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+
+ using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+
+ enum class ImageCreateFlagBits
+ {
+ eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+ eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+ eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+ eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+ eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT,
+ eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+ eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+ e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+ e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+ eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+ eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+ eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+ eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+ eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+ eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+ eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT,
+ eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT
+ };
+
+ using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
+
+ VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
+ {
+ return ImageCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
+ {
+ return ~( ImageCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT)
+ };
+ };
+
+ struct PhysicalDeviceImageFormatInfo2
+ {
+ PhysicalDeviceImageFormatInfo2( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), ImageCreateFlags flags_ = ImageCreateFlags() )
+ : format( format_ )
+ , type( type_ )
+ , tiling( tiling_ )
+ , usage( usage_ )
+ , flags( flags_ )
+ {
+ }
+
+ PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
+ }
+
+ PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
+ return *this;
+ }
+ PhysicalDeviceImageFormatInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2& setType( ImageType type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2& setTiling( ImageTiling tiling_ )
+ {
+ tiling = tiling_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2& setUsage( ImageUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ PhysicalDeviceImageFormatInfo2& setFlags( ImageCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceImageFormatInfo2&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>(this);
+ }
+
+ bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( format == rhs.format )
+ && ( type == rhs.type )
+ && ( tiling == rhs.tiling )
+ && ( usage == rhs.usage )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ Format format;
+ ImageType type;
+ ImageTiling tiling;
+ ImageUsageFlags usage;
+ ImageCreateFlags flags;
+ };
+ static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+
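+  // Usage sketch (illustrative, assuming the exceptions-enabled configuration of this
+  // header): asking whether an optimally tiled 2D RGBA8 image supports sampled and
+  // transfer-destination usage. "physicalDevice" is an assumed handle.
+  //
+  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm,
+  //                                                  vk::ImageType::e2D, vk::ImageTiling::eOptimal,
+  //                                                  vk::ImageUsageFlagBits::eSampled
+  //                                                  | vk::ImageUsageFlagBits::eTransferDst );
+  //   vk::ImageFormatProperties2 props = physicalDevice.getImageFormatProperties2( formatInfo );
+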
+ enum class PipelineCreateFlagBits
+ {
+ eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+ eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+ eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+ eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+ eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+ eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE,
+ eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE
+ };
+
+ using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
+
+ VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
+ {
+ return PipelineCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
+ {
+ return ~( PipelineCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<PipelineCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase)
+ };
+ };
+
+ struct ComputePipelineCreateInfo
+ {
+ ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
+ : flags( flags_ )
+ , stage( stage_ )
+ , layout( layout_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
+ {
+ }
+
+ ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
+ }
+
+ ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
+ return *this;
+ }
+ ComputePipelineCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
+ {
+ stage = stage_;
+ return *this;
+ }
+
+ ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+
+ operator const VkComputePipelineCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
+ }
+
+ bool operator==( ComputePipelineCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( stage == rhs.stage )
+ && ( layout == rhs.layout )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
+ }
+
+ bool operator!=( ComputePipelineCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eComputePipelineCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineCreateFlags flags;
+ PipelineShaderStageCreateInfo stage;
+ PipelineLayout layout;
+ Pipeline basePipelineHandle;
+ int32_t basePipelineIndex;
+ };
+ static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+
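+  // Usage sketch (illustrative): describing a compute pipeline from a compute-stage
+  // create info and a pipeline layout; "computeStage" and "pipelineLayout" are assumed
+  // objects, and no base pipeline is referenced.
+  //
+  //   vk::ComputePipelineCreateInfo pipelineInfo( vk::PipelineCreateFlags(),
+  //                                               computeStage, pipelineLayout );
+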
+ enum class ColorComponentFlagBits
+ {
+ eR = VK_COLOR_COMPONENT_R_BIT,
+ eG = VK_COLOR_COMPONENT_G_BIT,
+ eB = VK_COLOR_COMPONENT_B_BIT,
+ eA = VK_COLOR_COMPONENT_A_BIT
+ };
+
+ using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
+
+ VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
+ {
+ return ColorComponentFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
+ {
+ return ~( ColorComponentFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ColorComponentFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+ };
+ };
+
+ struct PipelineColorBlendAttachmentState
+ {
+ PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
+ : blendEnable( blendEnable_ )
+ , srcColorBlendFactor( srcColorBlendFactor_ )
+ , dstColorBlendFactor( dstColorBlendFactor_ )
+ , colorBlendOp( colorBlendOp_ )
+ , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+ , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+ , alphaBlendOp( alphaBlendOp_ )
+ , colorWriteMask( colorWriteMask_ )
+ {
+ }
+
+ PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+ }
+
+ PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+ return *this;
+ }
+ PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
+ {
+ blendEnable = blendEnable_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
+ {
+ srcColorBlendFactor = srcColorBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
+ {
+ dstColorBlendFactor = dstColorBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
+ {
+ colorBlendOp = colorBlendOp_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
+ {
+ srcAlphaBlendFactor = srcAlphaBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
+ {
+ dstAlphaBlendFactor = dstAlphaBlendFactor_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
+ {
+ alphaBlendOp = alphaBlendOp_;
+ return *this;
+ }
+
+ PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
+ {
+ colorWriteMask = colorWriteMask_;
+ return *this;
+ }
+
+ operator const VkPipelineColorBlendAttachmentState&() const
+ {
+ return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
+ }
+
+ bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
+ {
+ return ( blendEnable == rhs.blendEnable )
+ && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+ && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+ && ( colorBlendOp == rhs.colorBlendOp )
+ && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+ && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+ && ( alphaBlendOp == rhs.alphaBlendOp )
+ && ( colorWriteMask == rhs.colorWriteMask );
+ }
+
+ bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Bool32 blendEnable;
+ BlendFactor srcColorBlendFactor;
+ BlendFactor dstColorBlendFactor;
+ BlendOp colorBlendOp;
+ BlendFactor srcAlphaBlendFactor;
+ BlendFactor dstAlphaBlendFactor;
+ BlendOp alphaBlendOp;
+ ColorComponentFlags colorWriteMask;
+ };
+ static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+
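+  // Usage sketch (illustrative): conventional alpha blending with all color channels
+  // written; the write mask combines the component bits with the operator| defined above.
+  //
+  //   vk::PipelineColorBlendAttachmentState blendState( VK_TRUE,
+  //       vk::BlendFactor::eSrcAlpha, vk::BlendFactor::eOneMinusSrcAlpha, vk::BlendOp::eAdd,
+  //       vk::BlendFactor::eOne, vk::BlendFactor::eZero, vk::BlendOp::eAdd,
+  //       vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
+  //       | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
+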
+ struct PipelineColorBlendStateCreateInfo
+ {
+ PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
+ : flags( flags_ )
+ , logicOpEnable( logicOpEnable_ )
+ , logicOp( logicOp_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ {
+ memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+ }
+
+ PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+ }
+
+ PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+ return *this;
+ }
+ PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
+ {
+ logicOpEnable = logicOpEnable_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
+ {
+ logicOp = logicOp_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+ {
+ attachmentCount = attachmentCount_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
+ {
+ pAttachments = pAttachments_;
+ return *this;
+ }
+
+ PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
+ {
+ memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+ return *this;
+ }
+
+ operator const VkPipelineColorBlendStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( logicOpEnable == rhs.logicOpEnable )
+ && ( logicOp == rhs.logicOp )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+ }
+
+ bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineColorBlendStateCreateFlags flags;
+ Bool32 logicOpEnable;
+ LogicOp logicOp;
+ uint32_t attachmentCount;
+ const PipelineColorBlendAttachmentState* pAttachments;
+ float blendConstants[4];
+ };
+ static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+
+ enum class FenceCreateFlagBits
+ {
+ eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+ };
+
+ using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+ VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
+ {
+ return FenceCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
+ {
+ return ~( FenceCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<FenceCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+ };
+ };
+
+ struct FenceCreateInfo
+ {
+ FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
+ : flags( flags_ )
+ {
+ }
+
+ FenceCreateInfo( VkFenceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+ }
+
+ FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+ return *this;
+ }
+ FenceCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkFenceCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkFenceCreateInfo*>(this);
+ }
+
+ bool operator==( FenceCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( FenceCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eFenceCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ FenceCreateFlags flags;
+ };
+ static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+
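+  // Usage sketch (illustrative): creating the fence in the signaled state so the first
+  // wait on it returns immediately, a common pattern for per-frame fences.
+  //
+  //   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
+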
+ enum class FormatFeatureFlagBits
+ {
+ eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+ eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+ eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+ eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+ eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+ eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+ eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+ eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+ eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+ eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+ eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+ eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+ eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+ eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+ eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+ eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+ eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+ eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+ eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+ eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
+ eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+ eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+ eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+ eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT
+ };
+
+ using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
+
+ VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
+ {
+ return FormatFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
+ {
+ return ~( FormatFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<FormatFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT)
+ };
+ };
+
+ struct FormatProperties
+ {
+ operator const VkFormatProperties&() const
+ {
+ return *reinterpret_cast<const VkFormatProperties*>(this);
+ }
+
+ bool operator==( FormatProperties const& rhs ) const
+ {
+ return ( linearTilingFeatures == rhs.linearTilingFeatures )
+ && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+ && ( bufferFeatures == rhs.bufferFeatures );
+ }
+
+ bool operator!=( FormatProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ FormatFeatureFlags linearTilingFeatures;
+ FormatFeatureFlags optimalTilingFeatures;
+ FormatFeatureFlags bufferFeatures;
+ };
+ static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+
+ struct FormatProperties2
+ {
+ operator const VkFormatProperties2&() const
+ {
+ return *reinterpret_cast<const VkFormatProperties2*>(this);
+ }
+
+ bool operator==( FormatProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( formatProperties == rhs.formatProperties );
+ }
+
+ bool operator!=( FormatProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eFormatProperties2;
+
+ public:
+ void* pNext = nullptr;
+ FormatProperties formatProperties;
+ };
+ static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+
+ using FormatProperties2KHR = FormatProperties2;
+
+ enum class QueryControlFlagBits
+ {
+ ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+ };
+
+ using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
+
+ VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
+ {
+ return QueryControlFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
+ {
+ return ~( QueryControlFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryControlFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+ };
+ };
+
+ enum class QueryResultFlagBits
+ {
+ e64 = VK_QUERY_RESULT_64_BIT,
+ eWait = VK_QUERY_RESULT_WAIT_BIT,
+ eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+ ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+ };
+
+ using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
+
+ VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
+ {
+ return QueryResultFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
+ {
+ return ~( QueryResultFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryResultFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+ };
+ };
+
+ enum class CommandBufferUsageFlagBits
+ {
+ eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+ eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+ eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+ };
+
+ using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
+
+ VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
+ {
+ return CommandBufferUsageFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
+ {
+ return ~( CommandBufferUsageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandBufferUsageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+ };
+ };
+
+ enum class QueryPipelineStatisticFlagBits
+ {
+ eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+ eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+ eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+ eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+ eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+ eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+ eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+ eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+ eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+ };
+
+ using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
+
+ VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
+ {
+ return QueryPipelineStatisticFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+ {
+ return ~( QueryPipelineStatisticFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+ };
+ };
+
+ struct CommandBufferInheritanceInfo
+ {
+ CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
+ : renderPass( renderPass_ )
+ , subpass( subpass_ )
+ , framebuffer( framebuffer_ )
+ , occlusionQueryEnable( occlusionQueryEnable_ )
+ , queryFlags( queryFlags_ )
+ , pipelineStatistics( pipelineStatistics_ )
+ {
+ }
+
+ CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
+ }
+
+ CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
+ return *this;
+ }
+ CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
+ {
+ renderPass = renderPass_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
+ {
+ subpass = subpass_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
+ {
+ framebuffer = framebuffer_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
+ {
+ occlusionQueryEnable = occlusionQueryEnable_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
+ {
+ queryFlags = queryFlags_;
+ return *this;
+ }
+
+ CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
+ {
+ pipelineStatistics = pipelineStatistics_;
+ return *this;
+ }
+
+ operator const VkCommandBufferInheritanceInfo&() const
+ {
+ return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
+ }
+
+ bool operator==( CommandBufferInheritanceInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( renderPass == rhs.renderPass )
+ && ( subpass == rhs.subpass )
+ && ( framebuffer == rhs.framebuffer )
+ && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+ && ( queryFlags == rhs.queryFlags )
+ && ( pipelineStatistics == rhs.pipelineStatistics );
+ }
+
+ bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+
+ public:
+ const void* pNext = nullptr;
+ RenderPass renderPass;
+ uint32_t subpass;
+ Framebuffer framebuffer;
+ Bool32 occlusionQueryEnable;
+ QueryControlFlags queryFlags;
+ QueryPipelineStatisticFlags pipelineStatistics;
+ };
+ static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+
+ struct CommandBufferBeginInfo
+ {
+ CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
+ : flags( flags_ )
+ , pInheritanceInfo( pInheritanceInfo_ )
+ {
+ }
+
+ CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
+ }
+
+ CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
+ return *this;
+ }
+ CommandBufferBeginInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
+ {
+ pInheritanceInfo = pInheritanceInfo_;
+ return *this;
+ }
+
+ operator const VkCommandBufferBeginInfo&() const
+ {
+ return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
+ }
+
+ bool operator==( CommandBufferBeginInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pInheritanceInfo == rhs.pInheritanceInfo );
+ }
+
+ bool operator!=( CommandBufferBeginInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCommandBufferBeginInfo;
+
+ public:
+ const void* pNext = nullptr;
+ CommandBufferUsageFlags flags;
+ const CommandBufferInheritanceInfo* pInheritanceInfo;
+ };
+ static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
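+
+  // Usage sketch, not part of the generated interface: assuming a valid
+  // vk::CommandBuffer handle named `cmd` (a placeholder), a one-time-submit
+  // recording could be started with the wrapper above roughly like this;
+  // CommandBuffer::begin is declared further down in this header.
+  //
+  //   vk::CommandBufferBeginInfo beginInfo;
+  //   beginInfo.setFlags( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
+  //   cmd.begin( beginInfo );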
+
+ struct QueryPoolCreateInfo
+ {
+ QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
+ : flags( flags_ )
+ , queryType( queryType_ )
+ , queryCount( queryCount_ )
+ , pipelineStatistics( pipelineStatistics_ )
+ {
+ }
+
+ QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
+ }
+
+ QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
+ return *this;
+ }
+ QueryPoolCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
+ {
+ queryType = queryType_;
+ return *this;
+ }
+
+ QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
+ {
+ queryCount = queryCount_;
+ return *this;
+ }
+
+ QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
+ {
+ pipelineStatistics = pipelineStatistics_;
+ return *this;
+ }
+
+ operator const VkQueryPoolCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
+ }
+
+ bool operator==( QueryPoolCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queryType == rhs.queryType )
+ && ( queryCount == rhs.queryCount )
+ && ( pipelineStatistics == rhs.pipelineStatistics );
+ }
+
+ bool operator!=( QueryPoolCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eQueryPoolCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ QueryPoolCreateFlags flags;
+ QueryType queryType;
+ uint32_t queryCount;
+ QueryPipelineStatisticFlags pipelineStatistics;
+ };
+ static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
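+
+  // Usage sketch, not part of the generated interface: assuming a valid
+  // vk::Device named `device` (a placeholder), an occlusion query pool with
+  // 16 queries might be created like this; Device::createQueryPool is
+  // declared further down in this header.
+  //
+  //   vk::QueryPoolCreateInfo poolInfo;
+  //   poolInfo.setQueryType( vk::QueryType::eOcclusion )
+  //           .setQueryCount( 16 );
+  //   vk::QueryPool queryPool = device.createQueryPool( poolInfo );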
+
+ enum class ImageAspectFlagBits
+ {
+ eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+ eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+ eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+ eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+ ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+ ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+ ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+ ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+ ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+ ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT
+ };
+
+ using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
+
+ VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
+ {
+ return ImageAspectFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
+ {
+ return ~( ImageAspectFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ImageAspectFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2)
+ };
+ };
+
+ struct ImageSubresource
+ {
+ ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
+ : aspectMask( aspectMask_ )
+ , mipLevel( mipLevel_ )
+ , arrayLayer( arrayLayer_ )
+ {
+ }
+
+ ImageSubresource( VkImageSubresource const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresource ) );
+ }
+
+ ImageSubresource& operator=( VkImageSubresource const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresource ) );
+ return *this;
+ }
+ ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ ImageSubresource& setMipLevel( uint32_t mipLevel_ )
+ {
+ mipLevel = mipLevel_;
+ return *this;
+ }
+
+ ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
+ {
+ arrayLayer = arrayLayer_;
+ return *this;
+ }
+
+ operator const VkImageSubresource&() const
+ {
+ return *reinterpret_cast<const VkImageSubresource*>(this);
+ }
+
+ bool operator==( ImageSubresource const& rhs ) const
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( mipLevel == rhs.mipLevel )
+ && ( arrayLayer == rhs.arrayLayer );
+ }
+
+ bool operator!=( ImageSubresource const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageAspectFlags aspectMask;
+ uint32_t mipLevel;
+ uint32_t arrayLayer;
+ };
+ static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+
+ struct ImageSubresourceLayers
+ {
+ ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+ : aspectMask( aspectMask_ )
+ , mipLevel( mipLevel_ )
+ , baseArrayLayer( baseArrayLayer_ )
+ , layerCount( layerCount_ )
+ {
+ }
+
+ ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
+ }
+
+ ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
+ return *this;
+ }
+ ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
+ {
+ mipLevel = mipLevel_;
+ return *this;
+ }
+
+ ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+ {
+ baseArrayLayer = baseArrayLayer_;
+ return *this;
+ }
+
+ ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
+ {
+ layerCount = layerCount_;
+ return *this;
+ }
+
+ operator const VkImageSubresourceLayers&() const
+ {
+ return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
+ }
+
+ bool operator==( ImageSubresourceLayers const& rhs ) const
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( mipLevel == rhs.mipLevel )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
+ }
+
+ bool operator!=( ImageSubresourceLayers const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageAspectFlags aspectMask;
+ uint32_t mipLevel;
+ uint32_t baseArrayLayer;
+ uint32_t layerCount;
+ };
+ static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+
+ struct ImageSubresourceRange
+ {
+ ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+ : aspectMask( aspectMask_ )
+ , baseMipLevel( baseMipLevel_ )
+ , levelCount( levelCount_ )
+ , baseArrayLayer( baseArrayLayer_ )
+ , layerCount( layerCount_ )
+ {
+ }
+
+ ImageSubresourceRange( VkImageSubresourceRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
+ }
+
+ ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
+ return *this;
+ }
+ ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
+ {
+ baseMipLevel = baseMipLevel_;
+ return *this;
+ }
+
+ ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
+ {
+ levelCount = levelCount_;
+ return *this;
+ }
+
+ ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+ {
+ baseArrayLayer = baseArrayLayer_;
+ return *this;
+ }
+
+ ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
+ {
+ layerCount = layerCount_;
+ return *this;
+ }
+
+ operator const VkImageSubresourceRange&() const
+ {
+ return *reinterpret_cast<const VkImageSubresourceRange*>(this);
+ }
+
+ bool operator==( ImageSubresourceRange const& rhs ) const
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( baseMipLevel == rhs.baseMipLevel )
+ && ( levelCount == rhs.levelCount )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
+ }
+
+ bool operator!=( ImageSubresourceRange const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageAspectFlags aspectMask;
+ uint32_t baseMipLevel;
+ uint32_t levelCount;
+ uint32_t baseArrayLayer;
+ uint32_t layerCount;
+ };
+ static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
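+
+  // Usage sketch, not part of the generated interface: a range covering the
+  // whole color aspect of an image with `mipLevels` mip levels and a single
+  // array layer (`mipLevels` is a placeholder) could be built as:
+  //
+  //   vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor,
+  //                                    0, mipLevels,   // baseMipLevel, levelCount
+  //                                    0, 1 );         // baseArrayLayer, layerCount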
+
+ struct ImageMemoryBarrier
+ {
+ ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , oldLayout( oldLayout_ )
+ , newLayout( newLayout_ )
+ , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+ , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+ , image( image_ )
+ , subresourceRange( subresourceRange_ )
+ {
+ }
+
+ ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
+ }
+
+ ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
+ return *this;
+ }
+ ImageMemoryBarrier& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
+ {
+ oldLayout = oldLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
+ {
+ newLayout = newLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
+ {
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
+ {
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+
+ operator const VkImageMemoryBarrier&() const
+ {
+ return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
+ }
+
+ bool operator==( ImageMemoryBarrier const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( oldLayout == rhs.oldLayout )
+ && ( newLayout == rhs.newLayout )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( image == rhs.image )
+ && ( subresourceRange == rhs.subresourceRange );
+ }
+
+ bool operator!=( ImageMemoryBarrier const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageMemoryBarrier;
+
+ public:
+ const void* pNext = nullptr;
+ AccessFlags srcAccessMask;
+ AccessFlags dstAccessMask;
+ ImageLayout oldLayout;
+ ImageLayout newLayout;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ Image image;
+ ImageSubresourceRange subresourceRange;
+ };
+ static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
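+
+  // Usage sketch, not part of the generated interface: a typical layout
+  // transition (undefined -> transfer destination) for placeholder handles
+  // `cmd` and `image` might fill the wrapper like this; CommandBuffer::
+  // pipelineBarrier is declared further down in this header.
+  //
+  //   vk::ImageMemoryBarrier barrier;
+  //   barrier.setOldLayout( vk::ImageLayout::eUndefined )
+  //          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
+  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //          .setImage( image )
+  //          .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) )
+  //          .setDstAccessMask( vk::AccessFlagBits::eTransferWrite );
+  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
+  //                        {}, nullptr, nullptr, barrier );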
+
+ struct ImageViewCreateInfo
+ {
+ ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
+ : flags( flags_ )
+ , image( image_ )
+ , viewType( viewType_ )
+ , format( format_ )
+ , components( components_ )
+ , subresourceRange( subresourceRange_ )
+ {
+ }
+
+ ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
+ }
+
+ ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
+ return *this;
+ }
+ ImageViewCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
+ {
+ viewType = viewType_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setComponents( ComponentMapping components_ )
+ {
+ components = components_;
+ return *this;
+ }
+
+ ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+
+ operator const VkImageViewCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
+ }
+
+ bool operator==( ImageViewCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( image == rhs.image )
+ && ( viewType == rhs.viewType )
+ && ( format == rhs.format )
+ && ( components == rhs.components )
+ && ( subresourceRange == rhs.subresourceRange );
+ }
+
+ bool operator!=( ImageViewCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageViewCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ImageViewCreateFlags flags;
+ Image image;
+ ImageViewType viewType;
+ Format format;
+ ComponentMapping components;
+ ImageSubresourceRange subresourceRange;
+ };
+ static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
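+
+  // Usage sketch, not part of the generated interface: assuming a valid
+  // vk::Device `device` and a 2D color image `image` in eB8G8R8A8Unorm
+  // (all placeholders), a view could be created as follows; Device::
+  // createImageView is declared further down in this header.
+  //
+  //   vk::ImageViewCreateInfo viewInfo;
+  //   viewInfo.setImage( image )
+  //           .setViewType( vk::ImageViewType::e2D )
+  //           .setFormat( vk::Format::eB8G8R8A8Unorm )
+  //           .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
+  //   vk::ImageView view = device.createImageView( viewInfo );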
+
+ struct ImageCopy
+ {
+ ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
+ : srcSubresource( srcSubresource_ )
+ , srcOffset( srcOffset_ )
+ , dstSubresource( dstSubresource_ )
+ , dstOffset( dstOffset_ )
+ , extent( extent_ )
+ {
+ }
+
+ ImageCopy( VkImageCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageCopy ) );
+ }
+
+ ImageCopy& operator=( VkImageCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageCopy ) );
+ return *this;
+ }
+ ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageCopy& setSrcOffset( Offset3D srcOffset_ )
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageCopy& setDstOffset( Offset3D dstOffset_ )
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageCopy& setExtent( Extent3D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ operator const VkImageCopy&() const
+ {
+ return *reinterpret_cast<const VkImageCopy*>(this);
+ }
+
+ bool operator==( ImageCopy const& rhs ) const
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageCopy const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageSubresourceLayers srcSubresource;
+ Offset3D srcOffset;
+ ImageSubresourceLayers dstSubresource;
+ Offset3D dstOffset;
+ Extent3D extent;
+ };
+ static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+
+ struct ImageBlit
+ {
+ ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
+ : srcSubresource( srcSubresource_ )
+ , dstSubresource( dstSubresource_ )
+ {
+ memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+ memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+ }
+
+ ImageBlit( VkImageBlit const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageBlit ) );
+ }
+
+ ImageBlit& operator=( VkImageBlit const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageBlit ) );
+ return *this;
+ }
+ ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
+ {
+ memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+ return *this;
+ }
+
+ ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
+ {
+ memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+ return *this;
+ }
+
+ operator const VkImageBlit&() const
+ {
+ return *reinterpret_cast<const VkImageBlit*>(this);
+ }
+
+ bool operator==( ImageBlit const& rhs ) const
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
+ }
+
+ bool operator!=( ImageBlit const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageSubresourceLayers srcSubresource;
+ Offset3D srcOffsets[2];
+ ImageSubresourceLayers dstSubresource;
+ Offset3D dstOffsets[2];
+ };
+ static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
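+
+  // Usage sketch, not part of the generated interface: blitting the first
+  // mip level of `srcImage` (srcWidth x srcHeight) onto `dstImage`
+  // (dstWidth x dstHeight); all names are placeholders, and CommandBuffer::
+  // blitImage is declared further down in this header.
+  //
+  //   vk::ImageBlit blit;
+  //   blit.setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
+  //       .setSrcOffsets( { { vk::Offset3D(), vk::Offset3D( srcWidth, srcHeight, 1 ) } } )
+  //       .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
+  //       .setDstOffsets( { { vk::Offset3D(), vk::Offset3D( dstWidth, dstHeight, 1 ) } } );
+  //   cmd.blitImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
+  //                  dstImage, vk::ImageLayout::eTransferDstOptimal,
+  //                  blit, vk::Filter::eLinear );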
+
+ struct BufferImageCopy
+ {
+ BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() )
+ : bufferOffset( bufferOffset_ )
+ , bufferRowLength( bufferRowLength_ )
+ , bufferImageHeight( bufferImageHeight_ )
+ , imageSubresource( imageSubresource_ )
+ , imageOffset( imageOffset_ )
+ , imageExtent( imageExtent_ )
+ {
+ }
+
+ BufferImageCopy( VkBufferImageCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferImageCopy ) );
+ }
+
+ BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BufferImageCopy ) );
+ return *this;
+ }
+ BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
+ {
+ bufferOffset = bufferOffset_;
+ return *this;
+ }
+
+ BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
+ {
+ bufferRowLength = bufferRowLength_;
+ return *this;
+ }
+
+ BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
+ {
+ bufferImageHeight = bufferImageHeight_;
+ return *this;
+ }
+
+ BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
+ {
+ imageSubresource = imageSubresource_;
+ return *this;
+ }
+
+ BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
+ {
+ imageOffset = imageOffset_;
+ return *this;
+ }
+
+ BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+
+ operator const VkBufferImageCopy&() const
+ {
+ return *reinterpret_cast<const VkBufferImageCopy*>(this);
+ }
+
+ bool operator==( BufferImageCopy const& rhs ) const
+ {
+ return ( bufferOffset == rhs.bufferOffset )
+ && ( bufferRowLength == rhs.bufferRowLength )
+ && ( bufferImageHeight == rhs.bufferImageHeight )
+ && ( imageSubresource == rhs.imageSubresource )
+ && ( imageOffset == rhs.imageOffset )
+ && ( imageExtent == rhs.imageExtent );
+ }
+
+ bool operator!=( BufferImageCopy const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize bufferOffset;
+ uint32_t bufferRowLength;
+ uint32_t bufferImageHeight;
+ ImageSubresourceLayers imageSubresource;
+ Offset3D imageOffset;
+ Extent3D imageExtent;
+ };
+ static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
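+
+  // Usage sketch, not part of the generated interface: copying a tightly
+  // packed staging buffer into the first mip level of a `width` x `height`
+  // 2D image (all names are placeholders); leaving bufferRowLength and
+  // bufferImageHeight at 0 means the buffer data is tightly packed, and
+  // CommandBuffer::copyBufferToImage is declared further down in this header.
+  //
+  //   vk::BufferImageCopy region;
+  //   region.setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
+  //         .setImageExtent( vk::Extent3D( width, height, 1 ) );
+  //   cmd.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );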
+
+ struct ImageResolve
+ {
+ ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
+ : srcSubresource( srcSubresource_ )
+ , srcOffset( srcOffset_ )
+ , dstSubresource( dstSubresource_ )
+ , dstOffset( dstOffset_ )
+ , extent( extent_ )
+ {
+ }
+
+ ImageResolve( VkImageResolve const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageResolve ) );
+ }
+
+ ImageResolve& operator=( VkImageResolve const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageResolve ) );
+ return *this;
+ }
+ ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageResolve& setSrcOffset( Offset3D srcOffset_ )
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageResolve& setDstOffset( Offset3D dstOffset_ )
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageResolve& setExtent( Extent3D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ operator const VkImageResolve&() const
+ {
+ return *reinterpret_cast<const VkImageResolve*>(this);
+ }
+
+ bool operator==( ImageResolve const& rhs ) const
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageResolve const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageSubresourceLayers srcSubresource;
+ Offset3D srcOffset;
+ ImageSubresourceLayers dstSubresource;
+ Offset3D dstOffset;
+ Extent3D extent;
+ };
+ static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+
+ struct ClearAttachment
+ {
+ ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() )
+ : aspectMask( aspectMask_ )
+ , colorAttachment( colorAttachment_ )
+ , clearValue( clearValue_ )
+ {
+ }
+
+ ClearAttachment( VkClearAttachment const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearAttachment ) );
+ }
+
+ ClearAttachment& operator=( VkClearAttachment const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ClearAttachment ) );
+ return *this;
+ }
+ ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
+ {
+ colorAttachment = colorAttachment_;
+ return *this;
+ }
+
+ ClearAttachment& setClearValue( ClearValue clearValue_ )
+ {
+ clearValue = clearValue_;
+ return *this;
+ }
+
+ operator const VkClearAttachment&() const
+ {
+ return *reinterpret_cast<const VkClearAttachment*>(this);
+ }
+
+ ImageAspectFlags aspectMask;
+ uint32_t colorAttachment;
+ ClearValue clearValue;
+ };
+ static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+
+ struct InputAttachmentAspectReference
+ {
+ InputAttachmentAspectReference( uint32_t subpass_ = 0, uint32_t inputAttachmentIndex_ = 0, ImageAspectFlags aspectMask_ = ImageAspectFlags() )
+ : subpass( subpass_ )
+ , inputAttachmentIndex( inputAttachmentIndex_ )
+ , aspectMask( aspectMask_ )
+ {
+ }
+
+ InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) );
+ }
+
+ InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) );
+ return *this;
+ }
+ InputAttachmentAspectReference& setSubpass( uint32_t subpass_ )
+ {
+ subpass = subpass_;
+ return *this;
+ }
+
+ InputAttachmentAspectReference& setInputAttachmentIndex( uint32_t inputAttachmentIndex_ )
+ {
+ inputAttachmentIndex = inputAttachmentIndex_;
+ return *this;
+ }
+
+ InputAttachmentAspectReference& setAspectMask( ImageAspectFlags aspectMask_ )
+ {
+ aspectMask = aspectMask_;
+ return *this;
+ }
+
+ operator const VkInputAttachmentAspectReference&() const
+ {
+ return *reinterpret_cast<const VkInputAttachmentAspectReference*>(this);
+ }
+
+ bool operator==( InputAttachmentAspectReference const& rhs ) const
+ {
+ return ( subpass == rhs.subpass )
+ && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+ && ( aspectMask == rhs.aspectMask );
+ }
+
+ bool operator!=( InputAttachmentAspectReference const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t subpass;
+ uint32_t inputAttachmentIndex;
+ ImageAspectFlags aspectMask;
+ };
+ static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+
+ using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+
+ struct RenderPassInputAttachmentAspectCreateInfo
+ {
+ RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0, const InputAttachmentAspectReference* pAspectReferences_ = nullptr )
+ : aspectReferenceCount( aspectReferenceCount_ )
+ , pAspectReferences( pAspectReferences_ )
+ {
+ }
+
+ RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
+ }
+
+ RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
+ return *this;
+ }
+ RenderPassInputAttachmentAspectCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ RenderPassInputAttachmentAspectCreateInfo& setAspectReferenceCount( uint32_t aspectReferenceCount_ )
+ {
+ aspectReferenceCount = aspectReferenceCount_;
+ return *this;
+ }
+
+ RenderPassInputAttachmentAspectCreateInfo& setPAspectReferences( const InputAttachmentAspectReference* pAspectReferences_ )
+ {
+ pAspectReferences = pAspectReferences_;
+ return *this;
+ }
+
+ operator const VkRenderPassInputAttachmentAspectCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>(this);
+ }
+
+ bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( aspectReferenceCount == rhs.aspectReferenceCount )
+ && ( pAspectReferences == rhs.pAspectReferences );
+ }
+
+ bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t aspectReferenceCount;
+ const InputAttachmentAspectReference* pAspectReferences;
+ };
+ static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+
+ using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
+
+ struct BindImagePlaneMemoryInfo
+ {
+ BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor )
+ : planeAspect( planeAspect_ )
+ {
+ }
+
+ BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) );
+ }
+
+ BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) );
+ return *this;
+ }
+ BindImagePlaneMemoryInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindImagePlaneMemoryInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ )
+ {
+ planeAspect = planeAspect_;
+ return *this;
+ }
+
+ operator const VkBindImagePlaneMemoryInfo&() const
+ {
+ return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>(this);
+ }
+
+ bool operator==( BindImagePlaneMemoryInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( planeAspect == rhs.planeAspect );
+ }
+
+ bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ImageAspectFlagBits planeAspect;
+ };
+ static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
+
+ using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+
+ struct ImagePlaneMemoryRequirementsInfo
+ {
+ ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor )
+ : planeAspect( planeAspect_ )
+ {
+ }
+
+ ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
+ }
+
+ ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
+ return *this;
+ }
+ ImagePlaneMemoryRequirementsInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImagePlaneMemoryRequirementsInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ )
+ {
+ planeAspect = planeAspect_;
+ return *this;
+ }
+
+ operator const VkImagePlaneMemoryRequirementsInfo&() const
+ {
+ return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>(this);
+ }
+
+ bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( planeAspect == rhs.planeAspect );
+ }
+
+ bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ImageAspectFlagBits planeAspect;
+ };
+ static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+
+ using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+
+ enum class SparseImageFormatFlagBits
+ {
+ eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+ eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+ eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+ };
+
+ using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
+
+ VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
+ {
+ return SparseImageFormatFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
+ {
+ return ~( SparseImageFormatFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SparseImageFormatFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+ };
+ };
+
+ struct SparseImageFormatProperties
+ {
+ operator const VkSparseImageFormatProperties&() const
+ {
+ return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
+ }
+
+ bool operator==( SparseImageFormatProperties const& rhs ) const
+ {
+ return ( aspectMask == rhs.aspectMask )
+ && ( imageGranularity == rhs.imageGranularity )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( SparseImageFormatProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageAspectFlags aspectMask;
+ Extent3D imageGranularity;
+ SparseImageFormatFlags flags;
+ };
+ static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+
+ struct SparseImageMemoryRequirements
+ {
+ operator const VkSparseImageMemoryRequirements&() const
+ {
+ return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
+ }
+
+ bool operator==( SparseImageMemoryRequirements const& rhs ) const
+ {
+ return ( formatProperties == rhs.formatProperties )
+ && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+ && ( imageMipTailSize == rhs.imageMipTailSize )
+ && ( imageMipTailOffset == rhs.imageMipTailOffset )
+ && ( imageMipTailStride == rhs.imageMipTailStride );
+ }
+
+ bool operator!=( SparseImageMemoryRequirements const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ SparseImageFormatProperties formatProperties;
+ uint32_t imageMipTailFirstLod;
+ DeviceSize imageMipTailSize;
+ DeviceSize imageMipTailOffset;
+ DeviceSize imageMipTailStride;
+ };
+ static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+
+ struct SparseImageFormatProperties2
+ {
+ operator const VkSparseImageFormatProperties2&() const
+ {
+ return *reinterpret_cast<const VkSparseImageFormatProperties2*>(this);
+ }
+
+ bool operator==( SparseImageFormatProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( properties == rhs.properties );
+ }
+
+ bool operator!=( SparseImageFormatProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSparseImageFormatProperties2;
+
+ public:
+ void* pNext = nullptr;
+ SparseImageFormatProperties properties;
+ };
+ static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+
+ using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+
+ struct SparseImageMemoryRequirements2
+ {
+ operator const VkSparseImageMemoryRequirements2&() const
+ {
+ return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>(this);
+ }
+
+ bool operator==( SparseImageMemoryRequirements2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memoryRequirements == rhs.memoryRequirements );
+ }
+
+ bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+
+ public:
+ void* pNext = nullptr;
+ SparseImageMemoryRequirements memoryRequirements;
+ };
+ static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+
+ using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+
+ enum class SparseMemoryBindFlagBits
+ {
+ eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+ };
+
+ using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
+
+ VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
+ {
+ return SparseMemoryBindFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
+ {
+ return ~( SparseMemoryBindFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SparseMemoryBindFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+ };
+ };
+
+ struct SparseMemoryBind
+ {
+ SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
+ : resourceOffset( resourceOffset_ )
+ , size( size_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ , flags( flags_ )
+ {
+ }
+
+ SparseMemoryBind( VkSparseMemoryBind const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
+ }
+
+ SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
+ return *this;
+ }
+ SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
+ {
+ resourceOffset = resourceOffset_;
+ return *this;
+ }
+
+ SparseMemoryBind& setSize( DeviceSize size_ )
+ {
+ size = size_;
+ return *this;
+ }
+
+ SparseMemoryBind& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+ {
+ memoryOffset = memoryOffset_;
+ return *this;
+ }
+
+ SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkSparseMemoryBind&() const
+ {
+ return *reinterpret_cast<const VkSparseMemoryBind*>(this);
+ }
+
+ bool operator==( SparseMemoryBind const& rhs ) const
+ {
+ return ( resourceOffset == rhs.resourceOffset )
+ && ( size == rhs.size )
+ && ( memory == rhs.memory )
+ && ( memoryOffset == rhs.memoryOffset )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( SparseMemoryBind const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DeviceSize resourceOffset;
+ DeviceSize size;
+ DeviceMemory memory;
+ DeviceSize memoryOffset;
+ SparseMemoryBindFlags flags;
+ };
+ static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+
+ struct SparseImageMemoryBind
+ {
+ SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
+ : subresource( subresource_ )
+ , offset( offset_ )
+ , extent( extent_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ , flags( flags_ )
+ {
+ }
+
+ SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
+ }
+
+ SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
+ return *this;
+ }
+ SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
+ {
+ subresource = subresource_;
+ return *this;
+ }
+
+ SparseImageMemoryBind& setOffset( Offset3D offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ SparseImageMemoryBind& setExtent( Extent3D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+ {
+ memoryOffset = memoryOffset_;
+ return *this;
+ }
+
+ SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkSparseImageMemoryBind&() const
+ {
+ return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
+ }
+
+ bool operator==( SparseImageMemoryBind const& rhs ) const
+ {
+ return ( subresource == rhs.subresource )
+ && ( offset == rhs.offset )
+ && ( extent == rhs.extent )
+ && ( memory == rhs.memory )
+ && ( memoryOffset == rhs.memoryOffset )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( SparseImageMemoryBind const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageSubresource subresource;
+ Offset3D offset;
+ Extent3D extent;
+ DeviceMemory memory;
+ DeviceSize memoryOffset;
+ SparseMemoryBindFlags flags;
+ };
+ static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
+
+ struct SparseBufferMemoryBindInfo
+ {
+ SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
+ : buffer( buffer_ )
+ , bindCount( bindCount_ )
+ , pBinds( pBinds_ )
+ {
+ }
+
+ SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
+ }
+
+ SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
+ return *this;
+ }
+ SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+ {
+ bindCount = bindCount_;
+ return *this;
+ }
+
+ SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
+ {
+ pBinds = pBinds_;
+ return *this;
+ }
+
+ operator const VkSparseBufferMemoryBindInfo&() const
+ {
+ return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
+ }
+
+ bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
+ {
+ return ( buffer == rhs.buffer )
+ && ( bindCount == rhs.bindCount )
+ && ( pBinds == rhs.pBinds );
+ }
+
+ bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Buffer buffer;
+ uint32_t bindCount;
+ const SparseMemoryBind* pBinds;
+ };
+ static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+
+ struct SparseImageOpaqueMemoryBindInfo
+ {
+ SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
+ : image( image_ )
+ , bindCount( bindCount_ )
+ , pBinds( pBinds_ )
+ {
+ }
+
+ SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
+ }
+
+ SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
+ return *this;
+ }
+ SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+ {
+ bindCount = bindCount_;
+ return *this;
+ }
+
+ SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
+ {
+ pBinds = pBinds_;
+ return *this;
+ }
+
+ operator const VkSparseImageOpaqueMemoryBindInfo&() const
+ {
+ return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
+ }
+
+ bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
+ {
+ return ( image == rhs.image )
+ && ( bindCount == rhs.bindCount )
+ && ( pBinds == rhs.pBinds );
+ }
+
+ bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Image image;
+ uint32_t bindCount;
+ const SparseMemoryBind* pBinds;
+ };
+ static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+
+ struct SparseImageMemoryBindInfo
+ {
+ SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr )
+ : image( image_ )
+ , bindCount( bindCount_ )
+ , pBinds( pBinds_ )
+ {
+ }
+
+ SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
+ }
+
+ SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
+ return *this;
+ }
+ SparseImageMemoryBindInfo& setImage( Image image_ )
+ {
+ image = image_;
+ return *this;
+ }
+
+ SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+ {
+ bindCount = bindCount_;
+ return *this;
+ }
+
+ SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
+ {
+ pBinds = pBinds_;
+ return *this;
+ }
+
+ operator const VkSparseImageMemoryBindInfo&() const
+ {
+ return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
+ }
+
+ bool operator==( SparseImageMemoryBindInfo const& rhs ) const
+ {
+ return ( image == rhs.image )
+ && ( bindCount == rhs.bindCount )
+ && ( pBinds == rhs.pBinds );
+ }
+
+ bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Image image;
+ uint32_t bindCount;
+ const SparseImageMemoryBind* pBinds;
+ };
+ static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+
+ struct BindSparseInfo
+ {
+ BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , bufferBindCount( bufferBindCount_ )
+ , pBufferBinds( pBufferBinds_ )
+ , imageOpaqueBindCount( imageOpaqueBindCount_ )
+ , pImageOpaqueBinds( pImageOpaqueBinds_ )
+ , imageBindCount( imageBindCount_ )
+ , pImageBinds( pImageBinds_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphores( pSignalSemaphores_ )
+ {
+ }
+
+ BindSparseInfo( VkBindSparseInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindSparseInfo ) );
+ }
+
+ BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( BindSparseInfo ) );
+ return *this;
+ }
+ BindSparseInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ {
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
+ }
+
+ BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+ {
+ pWaitSemaphores = pWaitSemaphores_;
+ return *this;
+ }
+
+ BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
+ {
+ bufferBindCount = bufferBindCount_;
+ return *this;
+ }
+
+ BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
+ {
+ pBufferBinds = pBufferBinds_;
+ return *this;
+ }
+
+ BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
+ {
+ imageOpaqueBindCount = imageOpaqueBindCount_;
+ return *this;
+ }
+
+ BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
+ {
+ pImageOpaqueBinds = pImageOpaqueBinds_;
+ return *this;
+ }
+
+ BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
+ {
+ imageBindCount = imageBindCount_;
+ return *this;
+ }
+
+ BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
+ {
+ pImageBinds = pImageBinds_;
+ return *this;
+ }
+
+ BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+ {
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
+ }
+
+ BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+ {
+ pSignalSemaphores = pSignalSemaphores_;
+ return *this;
+ }
+
+ operator const VkBindSparseInfo&() const
+ {
+ return *reinterpret_cast<const VkBindSparseInfo*>(this);
+ }
+
+ bool operator==( BindSparseInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( bufferBindCount == rhs.bufferBindCount )
+ && ( pBufferBinds == rhs.pBufferBinds )
+ && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
+ && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
+ && ( imageBindCount == rhs.imageBindCount )
+ && ( pImageBinds == rhs.pImageBinds )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphores == rhs.pSignalSemaphores );
+ }
+
+ bool operator!=( BindSparseInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eBindSparseInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t waitSemaphoreCount;
+ const Semaphore* pWaitSemaphores;
+ uint32_t bufferBindCount;
+ const SparseBufferMemoryBindInfo* pBufferBinds;
+ uint32_t imageOpaqueBindCount;
+ const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
+ uint32_t imageBindCount;
+ const SparseImageMemoryBindInfo* pImageBinds;
+ uint32_t signalSemaphoreCount;
+ const Semaphore* pSignalSemaphores;
+ };
+ static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+
+ enum class PipelineStageFlagBits
+ {
+ eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+ eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+ eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+ eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+ eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+ eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+ eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+ eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+ eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+ eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+ eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+ eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ eHost = VK_PIPELINE_STAGE_HOST_BIT,
+ eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+ eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
+ };
+
+ using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
+
+ VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
+ {
+ return PipelineStageFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
+ {
+ return ~( PipelineStageFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<PipelineStageFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
+ };
+ };
+
+ enum class CommandPoolCreateFlagBits
+ {
+ eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+ eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+ eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+ };
+
+ using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
+
+ VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
+ {
+ return CommandPoolCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
+ {
+ return ~( CommandPoolCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandPoolCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
+ };
+ };
+
+ struct CommandPoolCreateInfo
+ {
+ CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
+ {
+ }
+
+ CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
+ }
+
+ CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
+ return *this;
+ }
+ CommandPoolCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+ {
+ queueFamilyIndex = queueFamilyIndex_;
+ return *this;
+ }
+
+ operator const VkCommandPoolCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
+ }
+
+ bool operator==( CommandPoolCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queueFamilyIndex == rhs.queueFamilyIndex );
+ }
+
+ bool operator!=( CommandPoolCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCommandPoolCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ CommandPoolCreateFlags flags;
+ uint32_t queueFamilyIndex;
+ };
+ static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
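+
+ // Illustrative usage sketch: the fluent setters above allow the create-info to
+ // be built inline. Assumes a vk::Device "device" and a queue family index
+ // "graphicsQueueFamily" obtained elsewhere; createCommandPool is the
+ // enhanced-mode device member that wraps vkCreateCommandPool:
+ //
+ //   vk::CommandPoolCreateInfo poolInfo = vk::CommandPoolCreateInfo()
+ //       .setFlags( vk::CommandPoolCreateFlagBits::eResetCommandBuffer )
+ //       .setQueueFamilyIndex( graphicsQueueFamily );
+ //   vk::CommandPool commandPool = device.createCommandPool( poolInfo );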
+
+ enum class CommandPoolResetFlagBits
+ {
+ eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+ };
+
+ using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
+
+ VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
+ {
+ return CommandPoolResetFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
+ {
+ return ~( CommandPoolResetFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandPoolResetFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+ };
+ };
+
+ enum class CommandBufferResetFlagBits
+ {
+ eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+ };
+
+ using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
+
+ VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
+ {
+ return CommandBufferResetFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
+ {
+ return ~( CommandBufferResetFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<CommandBufferResetFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+ };
+ };
+
+ enum class SampleCountFlagBits
+ {
+ e1 = VK_SAMPLE_COUNT_1_BIT,
+ e2 = VK_SAMPLE_COUNT_2_BIT,
+ e4 = VK_SAMPLE_COUNT_4_BIT,
+ e8 = VK_SAMPLE_COUNT_8_BIT,
+ e16 = VK_SAMPLE_COUNT_16_BIT,
+ e32 = VK_SAMPLE_COUNT_32_BIT,
+ e64 = VK_SAMPLE_COUNT_64_BIT
+ };
+
+ using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
+
+ VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
+ {
+ return SampleCountFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
+ {
+ return ~( SampleCountFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SampleCountFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+ };
+ };
+
+ struct ImageFormatProperties
+ {
+ operator const VkImageFormatProperties&() const
+ {
+ return *reinterpret_cast<const VkImageFormatProperties*>(this);
+ }
+
+ bool operator==( ImageFormatProperties const& rhs ) const
+ {
+ return ( maxExtent == rhs.maxExtent )
+ && ( maxMipLevels == rhs.maxMipLevels )
+ && ( maxArrayLayers == rhs.maxArrayLayers )
+ && ( sampleCounts == rhs.sampleCounts )
+ && ( maxResourceSize == rhs.maxResourceSize );
+ }
+
+ bool operator!=( ImageFormatProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Extent3D maxExtent;
+ uint32_t maxMipLevels;
+ uint32_t maxArrayLayers;
+ SampleCountFlags sampleCounts;
+ DeviceSize maxResourceSize;
+ };
+ static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+
+ struct ImageCreateInfo
+ {
+ ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined )
+ : flags( flags_ )
+ , imageType( imageType_ )
+ , format( format_ )
+ , extent( extent_ )
+ , mipLevels( mipLevels_ )
+ , arrayLayers( arrayLayers_ )
+ , samples( samples_ )
+ , tiling( tiling_ )
+ , usage( usage_ )
+ , sharingMode( sharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ , initialLayout( initialLayout_ )
+ {
+ }
+
+ ImageCreateInfo( VkImageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
+ }
+
+ ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
+ return *this;
+ }
+ ImageCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImageCreateInfo& setImageType( ImageType imageType_ )
+ {
+ imageType = imageType_;
+ return *this;
+ }
+
+ ImageCreateInfo& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ ImageCreateInfo& setExtent( Extent3D extent_ )
+ {
+ extent = extent_;
+ return *this;
+ }
+
+ ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
+ {
+ mipLevels = mipLevels_;
+ return *this;
+ }
+
+ ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
+ {
+ arrayLayers = arrayLayers_;
+ return *this;
+ }
+
+ ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
+ {
+ samples = samples_;
+ return *this;
+ }
+
+ ImageCreateInfo& setTiling( ImageTiling tiling_ )
+ {
+ tiling = tiling_;
+ return *this;
+ }
+
+ ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
+ {
+ sharingMode = sharingMode_;
+ return *this;
+ }
+
+ ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+ {
+ queueFamilyIndexCount = queueFamilyIndexCount_;
+ return *this;
+ }
+
+ ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+ {
+ pQueueFamilyIndices = pQueueFamilyIndices_;
+ return *this;
+ }
+
+ ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
+ {
+ initialLayout = initialLayout_;
+ return *this;
+ }
+
+ operator const VkImageCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkImageCreateInfo*>(this);
+ }
+
+ bool operator==( ImageCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( imageType == rhs.imageType )
+ && ( format == rhs.format )
+ && ( extent == rhs.extent )
+ && ( mipLevels == rhs.mipLevels )
+ && ( arrayLayers == rhs.arrayLayers )
+ && ( samples == rhs.samples )
+ && ( tiling == rhs.tiling )
+ && ( usage == rhs.usage )
+ && ( sharingMode == rhs.sharingMode )
+ && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+ && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+ && ( initialLayout == rhs.initialLayout );
+ }
+
+ bool operator!=( ImageCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ImageCreateFlags flags;
+ ImageType imageType;
+ Format format;
+ Extent3D extent;
+ uint32_t mipLevels;
+ uint32_t arrayLayers;
+ SampleCountFlagBits samples;
+ ImageTiling tiling;
+ ImageUsageFlags usage;
+ SharingMode sharingMode;
+ uint32_t queueFamilyIndexCount;
+ const uint32_t* pQueueFamilyIndices;
+ ImageLayout initialLayout;
+ };
+ static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
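+
+ // Illustrative usage sketch: describing a 4x multisampled 2D color image with
+ // the setters above. The format, extent and usage values are example
+ // assumptions, and "device" is a vk::Device created elsewhere:
+ //
+ //   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo()
+ //       .setImageType( vk::ImageType::e2D )
+ //       .setFormat( vk::Format::eR8G8B8A8Unorm )
+ //       .setExtent( vk::Extent3D( 1024, 768, 1 ) )
+ //       .setMipLevels( 1 )
+ //       .setArrayLayers( 1 )
+ //       .setSamples( vk::SampleCountFlagBits::e4 )
+ //       .setTiling( vk::ImageTiling::eOptimal )
+ //       .setUsage( vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled )
+ //       .setInitialLayout( vk::ImageLayout::eUndefined );
+ //   vk::Image image = device.createImage( imageInfo );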
+
+ struct PipelineMultisampleStateCreateInfo
+ {
+ PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 )
+ : flags( flags_ )
+ , rasterizationSamples( rasterizationSamples_ )
+ , sampleShadingEnable( sampleShadingEnable_ )
+ , minSampleShading( minSampleShading_ )
+ , pSampleMask( pSampleMask_ )
+ , alphaToCoverageEnable( alphaToCoverageEnable_ )
+ , alphaToOneEnable( alphaToOneEnable_ )
+ {
+ }
+
+ PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+ }
+
+ PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+ return *this;
+ }
+ PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
+ {
+ rasterizationSamples = rasterizationSamples_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
+ {
+ sampleShadingEnable = sampleShadingEnable_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
+ {
+ minSampleShading = minSampleShading_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
+ {
+ pSampleMask = pSampleMask_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
+ {
+ alphaToCoverageEnable = alphaToCoverageEnable_;
+ return *this;
+ }
+
+ PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
+ {
+ alphaToOneEnable = alphaToOneEnable_;
+ return *this;
+ }
+
+ operator const VkPipelineMultisampleStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( rasterizationSamples == rhs.rasterizationSamples )
+ && ( sampleShadingEnable == rhs.sampleShadingEnable )
+ && ( minSampleShading == rhs.minSampleShading )
+ && ( pSampleMask == rhs.pSampleMask )
+ && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
+ && ( alphaToOneEnable == rhs.alphaToOneEnable );
+ }
+
+ bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineMultisampleStateCreateFlags flags;
+ SampleCountFlagBits rasterizationSamples;
+ Bool32 sampleShadingEnable;
+ float minSampleShading;
+ const SampleMask* pSampleMask;
+ Bool32 alphaToCoverageEnable;
+ Bool32 alphaToOneEnable;
+ };
+ static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
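+
+ // Illustrative usage sketch: a 4x MSAA multisample state with per-sample
+ // shading left disabled; only the fields that differ from the constructor
+ // defaults shown above need to be set:
+ //
+ //   vk::PipelineMultisampleStateCreateInfo multisampleState =
+ //       vk::PipelineMultisampleStateCreateInfo()
+ //           .setRasterizationSamples( vk::SampleCountFlagBits::e4 )
+ //           .setSampleShadingEnable( VK_FALSE );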
+
+ struct GraphicsPipelineCreateInfo
+ {
+ GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
+ : flags( flags_ )
+ , stageCount( stageCount_ )
+ , pStages( pStages_ )
+ , pVertexInputState( pVertexInputState_ )
+ , pInputAssemblyState( pInputAssemblyState_ )
+ , pTessellationState( pTessellationState_ )
+ , pViewportState( pViewportState_ )
+ , pRasterizationState( pRasterizationState_ )
+ , pMultisampleState( pMultisampleState_ )
+ , pDepthStencilState( pDepthStencilState_ )
+ , pColorBlendState( pColorBlendState_ )
+ , pDynamicState( pDynamicState_ )
+ , layout( layout_ )
+ , renderPass( renderPass_ )
+ , subpass( subpass_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
+ {
+ }
+
+ GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+ }
+
+ GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+ return *this;
+ }
+ GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
+ {
+ stageCount = stageCount_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
+ {
+ pStages = pStages_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
+ {
+ pVertexInputState = pVertexInputState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
+ {
+ pInputAssemblyState = pInputAssemblyState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
+ {
+ pTessellationState = pTessellationState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
+ {
+ pViewportState = pViewportState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
+ {
+ pRasterizationState = pRasterizationState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
+ {
+ pMultisampleState = pMultisampleState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
+ {
+ pDepthStencilState = pDepthStencilState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
+ {
+ pColorBlendState = pColorBlendState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
+ {
+ pDynamicState = pDynamicState_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
+ {
+ renderPass = renderPass_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
+ {
+ subpass = subpass_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+
+ operator const VkGraphicsPipelineCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
+ }
+
+ bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( stageCount == rhs.stageCount )
+ && ( pStages == rhs.pStages )
+ && ( pVertexInputState == rhs.pVertexInputState )
+ && ( pInputAssemblyState == rhs.pInputAssemblyState )
+ && ( pTessellationState == rhs.pTessellationState )
+ && ( pViewportState == rhs.pViewportState )
+ && ( pRasterizationState == rhs.pRasterizationState )
+ && ( pMultisampleState == rhs.pMultisampleState )
+ && ( pDepthStencilState == rhs.pDepthStencilState )
+ && ( pColorBlendState == rhs.pColorBlendState )
+ && ( pDynamicState == rhs.pDynamicState )
+ && ( layout == rhs.layout )
+ && ( renderPass == rhs.renderPass )
+ && ( subpass == rhs.subpass )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
+ }
+
+ bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineCreateFlags flags;
+ uint32_t stageCount;
+ const PipelineShaderStageCreateInfo* pStages;
+ const PipelineVertexInputStateCreateInfo* pVertexInputState;
+ const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
+ const PipelineTessellationStateCreateInfo* pTessellationState;
+ const PipelineViewportStateCreateInfo* pViewportState;
+ const PipelineRasterizationStateCreateInfo* pRasterizationState;
+ const PipelineMultisampleStateCreateInfo* pMultisampleState;
+ const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
+ const PipelineColorBlendStateCreateInfo* pColorBlendState;
+ const PipelineDynamicStateCreateInfo* pDynamicState;
+ PipelineLayout layout;
+ RenderPass renderPass;
+ uint32_t subpass;
+ Pipeline basePipelineHandle;
+ int32_t basePipelineIndex;
+ };
+ static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
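+
+ // Illustrative usage sketch: wiring the per-stage state blocks into a graphics
+ // pipeline description. The shader stages, fixed-function state objects,
+ // "pipelineLayout" and "renderPass" are assumed to have been filled in
+ // elsewhere, and createGraphicsPipeline is the enhanced-mode overload taking a
+ // pipeline cache and a single create-info:
+ //
+ //   vk::GraphicsPipelineCreateInfo pipelineInfo = vk::GraphicsPipelineCreateInfo()
+ //       .setStageCount( 2 )
+ //       .setPStages( shaderStages )
+ //       .setPVertexInputState( &vertexInputState )
+ //       .setPInputAssemblyState( &inputAssemblyState )
+ //       .setPViewportState( &viewportState )
+ //       .setPRasterizationState( &rasterizationState )
+ //       .setPMultisampleState( &multisampleState )
+ //       .setPColorBlendState( &colorBlendState )
+ //       .setLayout( pipelineLayout )
+ //       .setRenderPass( renderPass )
+ //       .setSubpass( 0 );
+ //   vk::Pipeline pipeline = device.createGraphicsPipeline( vk::PipelineCache(), pipelineInfo );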
+
+ struct PhysicalDeviceLimits
+ {
+ operator const VkPhysicalDeviceLimits&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
+ }
+
+ bool operator==( PhysicalDeviceLimits const& rhs ) const
+ {
+ return ( maxImageDimension1D == rhs.maxImageDimension1D )
+ && ( maxImageDimension2D == rhs.maxImageDimension2D )
+ && ( maxImageDimension3D == rhs.maxImageDimension3D )
+ && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+ && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+ && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+ && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+ && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+ && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+ && ( bufferImageGranularity == rhs.bufferImageGranularity )
+ && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+ && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+ && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+ && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+ && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+ && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+ && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+ && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+ && ( maxPerStageResources == rhs.maxPerStageResources )
+ && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+ && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+ && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+ && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+ && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+ && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+ && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+ && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+ && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+ && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+ && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+ && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+ && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+ && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+ && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+ && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+ && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+ && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+ && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+ && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+ && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+ && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+ && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+ && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+ && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+ && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+ && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+ && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+ && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+ && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+ && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+ && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
+ && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+ && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+ && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+ && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+ && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+ && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+ && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+ && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+ && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+ && ( maxViewports == rhs.maxViewports )
+ && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
+ && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
+ && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+ && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+ && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+ && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+ && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+ && ( minTexelOffset == rhs.minTexelOffset )
+ && ( maxTexelOffset == rhs.maxTexelOffset )
+ && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+ && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+ && ( minInterpolationOffset == rhs.minInterpolationOffset )
+ && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+ && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+ && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+ && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+ && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+ && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+ && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+ && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+ && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+ && ( maxColorAttachments == rhs.maxColorAttachments )
+ && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+ && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+ && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+ && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+ && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+ && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+ && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+ && ( timestampPeriod == rhs.timestampPeriod )
+ && ( maxClipDistances == rhs.maxClipDistances )
+ && ( maxCullDistances == rhs.maxCullDistances )
+ && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+ && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+ && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
+ && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
+ && ( pointSizeGranularity == rhs.pointSizeGranularity )
+ && ( lineWidthGranularity == rhs.lineWidthGranularity )
+ && ( strictLines == rhs.strictLines )
+ && ( standardSampleLocations == rhs.standardSampleLocations )
+ && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+ && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+ && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+ }
+
+ bool operator!=( PhysicalDeviceLimits const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t maxImageDimension1D;
+ uint32_t maxImageDimension2D;
+ uint32_t maxImageDimension3D;
+ uint32_t maxImageDimensionCube;
+ uint32_t maxImageArrayLayers;
+ uint32_t maxTexelBufferElements;
+ uint32_t maxUniformBufferRange;
+ uint32_t maxStorageBufferRange;
+ uint32_t maxPushConstantsSize;
+ uint32_t maxMemoryAllocationCount;
+ uint32_t maxSamplerAllocationCount;
+ DeviceSize bufferImageGranularity;
+ DeviceSize sparseAddressSpaceSize;
+ uint32_t maxBoundDescriptorSets;
+ uint32_t maxPerStageDescriptorSamplers;
+ uint32_t maxPerStageDescriptorUniformBuffers;
+ uint32_t maxPerStageDescriptorStorageBuffers;
+ uint32_t maxPerStageDescriptorSampledImages;
+ uint32_t maxPerStageDescriptorStorageImages;
+ uint32_t maxPerStageDescriptorInputAttachments;
+ uint32_t maxPerStageResources;
+ uint32_t maxDescriptorSetSamplers;
+ uint32_t maxDescriptorSetUniformBuffers;
+ uint32_t maxDescriptorSetUniformBuffersDynamic;
+ uint32_t maxDescriptorSetStorageBuffers;
+ uint32_t maxDescriptorSetStorageBuffersDynamic;
+ uint32_t maxDescriptorSetSampledImages;
+ uint32_t maxDescriptorSetStorageImages;
+ uint32_t maxDescriptorSetInputAttachments;
+ uint32_t maxVertexInputAttributes;
+ uint32_t maxVertexInputBindings;
+ uint32_t maxVertexInputAttributeOffset;
+ uint32_t maxVertexInputBindingStride;
+ uint32_t maxVertexOutputComponents;
+ uint32_t maxTessellationGenerationLevel;
+ uint32_t maxTessellationPatchSize;
+ uint32_t maxTessellationControlPerVertexInputComponents;
+ uint32_t maxTessellationControlPerVertexOutputComponents;
+ uint32_t maxTessellationControlPerPatchOutputComponents;
+ uint32_t maxTessellationControlTotalOutputComponents;
+ uint32_t maxTessellationEvaluationInputComponents;
+ uint32_t maxTessellationEvaluationOutputComponents;
+ uint32_t maxGeometryShaderInvocations;
+ uint32_t maxGeometryInputComponents;
+ uint32_t maxGeometryOutputComponents;
+ uint32_t maxGeometryOutputVertices;
+ uint32_t maxGeometryTotalOutputComponents;
+ uint32_t maxFragmentInputComponents;
+ uint32_t maxFragmentOutputAttachments;
+ uint32_t maxFragmentDualSrcAttachments;
+ uint32_t maxFragmentCombinedOutputResources;
+ uint32_t maxComputeSharedMemorySize;
+ uint32_t maxComputeWorkGroupCount[3];
+ uint32_t maxComputeWorkGroupInvocations;
+ uint32_t maxComputeWorkGroupSize[3];
+ uint32_t subPixelPrecisionBits;
+ uint32_t subTexelPrecisionBits;
+ uint32_t mipmapPrecisionBits;
+ uint32_t maxDrawIndexedIndexValue;
+ uint32_t maxDrawIndirectCount;
+ float maxSamplerLodBias;
+ float maxSamplerAnisotropy;
+ uint32_t maxViewports;
+ uint32_t maxViewportDimensions[2];
+ float viewportBoundsRange[2];
+ uint32_t viewportSubPixelBits;
+ size_t minMemoryMapAlignment;
+ DeviceSize minTexelBufferOffsetAlignment;
+ DeviceSize minUniformBufferOffsetAlignment;
+ DeviceSize minStorageBufferOffsetAlignment;
+ int32_t minTexelOffset;
+ uint32_t maxTexelOffset;
+ int32_t minTexelGatherOffset;
+ uint32_t maxTexelGatherOffset;
+ float minInterpolationOffset;
+ float maxInterpolationOffset;
+ uint32_t subPixelInterpolationOffsetBits;
+ uint32_t maxFramebufferWidth;
+ uint32_t maxFramebufferHeight;
+ uint32_t maxFramebufferLayers;
+ SampleCountFlags framebufferColorSampleCounts;
+ SampleCountFlags framebufferDepthSampleCounts;
+ SampleCountFlags framebufferStencilSampleCounts;
+ SampleCountFlags framebufferNoAttachmentsSampleCounts;
+ uint32_t maxColorAttachments;
+ SampleCountFlags sampledImageColorSampleCounts;
+ SampleCountFlags sampledImageIntegerSampleCounts;
+ SampleCountFlags sampledImageDepthSampleCounts;
+ SampleCountFlags sampledImageStencilSampleCounts;
+ SampleCountFlags storageImageSampleCounts;
+ uint32_t maxSampleMaskWords;
+ Bool32 timestampComputeAndGraphics;
+ float timestampPeriod;
+ uint32_t maxClipDistances;
+ uint32_t maxCullDistances;
+ uint32_t maxCombinedClipAndCullDistances;
+ uint32_t discreteQueuePriorities;
+ float pointSizeRange[2];
+ float lineWidthRange[2];
+ float pointSizeGranularity;
+ float lineWidthGranularity;
+ Bool32 strictLines;
+ Bool32 standardSampleLocations;
+ DeviceSize optimalBufferCopyOffsetAlignment;
+ DeviceSize optimalBufferCopyRowPitchAlignment;
+ DeviceSize nonCoherentAtomSize;
+ };
+ static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceProperties
+ {
+ operator const VkPhysicalDeviceProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceProperties const& rhs ) const
+ {
+ return ( apiVersion == rhs.apiVersion )
+ && ( driverVersion == rhs.driverVersion )
+ && ( vendorID == rhs.vendorID )
+ && ( deviceID == rhs.deviceID )
+ && ( deviceType == rhs.deviceType )
+ && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( limits == rhs.limits )
+ && ( sparseProperties == rhs.sparseProperties );
+ }
+
+ bool operator!=( PhysicalDeviceProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t apiVersion;
+ uint32_t driverVersion;
+ uint32_t vendorID;
+ uint32_t deviceID;
+ PhysicalDeviceType deviceType;
+ char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+ uint8_t pipelineCacheUUID[VK_UUID_SIZE];
+ PhysicalDeviceLimits limits;
+ PhysicalDeviceSparseProperties sparseProperties;
+ };
+ static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
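+
+ // Illustrative usage sketch: PhysicalDeviceProperties is a query-only struct,
+ // obtained from a physical device and then read. Assumes a vk::PhysicalDevice
+ // "physicalDevice" selected elsewhere:
+ //
+ //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
+ //   uint32_t maxPushConstants = props.limits.maxPushConstantsSize;
+ //   bool supports4xColorMsaa = static_cast<bool>(
+ //       props.limits.framebufferColorSampleCounts & vk::SampleCountFlagBits::e4 );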
+
+ struct PhysicalDeviceProperties2
+ {
+ operator const VkPhysicalDeviceProperties2&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceProperties2*>(this);
+ }
+
+ bool operator==( PhysicalDeviceProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( properties == rhs.properties );
+ }
+
+ bool operator!=( PhysicalDeviceProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceProperties2;
+
+ public:
+ void* pNext = nullptr;
+ PhysicalDeviceProperties properties;
+ };
+ static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+
+ struct ImageFormatProperties2
+ {
+ operator const VkImageFormatProperties2&() const
+ {
+ return *reinterpret_cast<const VkImageFormatProperties2*>(this);
+ }
+
+ bool operator==( ImageFormatProperties2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( imageFormatProperties == rhs.imageFormatProperties );
+ }
+
+ bool operator!=( ImageFormatProperties2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImageFormatProperties2;
+
+ public:
+ void* pNext = nullptr;
+ ImageFormatProperties imageFormatProperties;
+ };
+ static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+
+ using ImageFormatProperties2KHR = ImageFormatProperties2;
+
+ struct PhysicalDeviceSparseImageFormatInfo2
+ {
+ PhysicalDeviceSparseImageFormatInfo2( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageUsageFlags usage_ = ImageUsageFlags(), ImageTiling tiling_ = ImageTiling::eOptimal )
+ : format( format_ )
+ , type( type_ )
+ , samples( samples_ )
+ , usage( usage_ )
+ , tiling( tiling_ )
+ {
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
+ return *this;
+ }
+ PhysicalDeviceSparseImageFormatInfo2& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& setType( ImageType type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& setSamples( SampleCountFlagBits samples_ )
+ {
+ samples = samples_;
+ return *this;
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& setUsage( ImageUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ PhysicalDeviceSparseImageFormatInfo2& setTiling( ImageTiling tiling_ )
+ {
+ tiling = tiling_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceSparseImageFormatInfo2&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( format == rhs.format )
+ && ( type == rhs.type )
+ && ( samples == rhs.samples )
+ && ( usage == rhs.usage )
+ && ( tiling == rhs.tiling );
+ }
+
+ bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
+ public:
+ const void* pNext = nullptr;
+ Format format;
+ ImageType type;
+ SampleCountFlagBits samples;
+ ImageUsageFlags usage;
+ ImageTiling tiling;
+ };
+ static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+
+ struct SampleLocationsInfoEXT
+ {
+ SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_ = SampleCountFlagBits::e1, Extent2D sampleLocationGridSize_ = Extent2D(), uint32_t sampleLocationsCount_ = 0, const SampleLocationEXT* pSampleLocations_ = nullptr )
+ : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
+ , sampleLocationGridSize( sampleLocationGridSize_ )
+ , sampleLocationsCount( sampleLocationsCount_ )
+ , pSampleLocations( pSampleLocations_ )
+ {
+ }
+
+ SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) );
+ }
+
+ SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) );
+ return *this;
+ }
+ SampleLocationsInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SampleLocationsInfoEXT& setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ )
+ {
+ sampleLocationsPerPixel = sampleLocationsPerPixel_;
+ return *this;
+ }
+
+ SampleLocationsInfoEXT& setSampleLocationGridSize( Extent2D sampleLocationGridSize_ )
+ {
+ sampleLocationGridSize = sampleLocationGridSize_;
+ return *this;
+ }
+
+ SampleLocationsInfoEXT& setSampleLocationsCount( uint32_t sampleLocationsCount_ )
+ {
+ sampleLocationsCount = sampleLocationsCount_;
+ return *this;
+ }
+
+ SampleLocationsInfoEXT& setPSampleLocations( const SampleLocationEXT* pSampleLocations_ )
+ {
+ pSampleLocations = pSampleLocations_;
+ return *this;
+ }
+
+ operator const VkSampleLocationsInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkSampleLocationsInfoEXT*>(this);
+ }
+
+ bool operator==( SampleLocationsInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
+ && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
+ && ( sampleLocationsCount == rhs.sampleLocationsCount )
+ && ( pSampleLocations == rhs.pSampleLocations );
+ }
+
+ bool operator!=( SampleLocationsInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSampleLocationsInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ SampleCountFlagBits sampleLocationsPerPixel;
+ Extent2D sampleLocationGridSize;
+ uint32_t sampleLocationsCount;
+ const SampleLocationEXT* pSampleLocations;
+ };
+ static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
+
+ struct AttachmentSampleLocationsEXT
+ {
+ AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
+ : attachmentIndex( attachmentIndex_ )
+ , sampleLocationsInfo( sampleLocationsInfo_ )
+ {
+ }
+
+ AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) );
+ }
+
+ AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) );
+ return *this;
+ }
+ AttachmentSampleLocationsEXT& setAttachmentIndex( uint32_t attachmentIndex_ )
+ {
+ attachmentIndex = attachmentIndex_;
+ return *this;
+ }
+
+ AttachmentSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
+ {
+ sampleLocationsInfo = sampleLocationsInfo_;
+ return *this;
+ }
+
+ operator const VkAttachmentSampleLocationsEXT&() const
+ {
+ return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>(this);
+ }
+
+ bool operator==( AttachmentSampleLocationsEXT const& rhs ) const
+ {
+ return ( attachmentIndex == rhs.attachmentIndex )
+ && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+ }
+
+ bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t attachmentIndex;
+ SampleLocationsInfoEXT sampleLocationsInfo;
+ };
+ static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
+
+ struct SubpassSampleLocationsEXT
+ {
+ SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
+ : subpassIndex( subpassIndex_ )
+ , sampleLocationsInfo( sampleLocationsInfo_ )
+ {
+ }
+
+ SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) );
+ }
+
+ SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) );
+ return *this;
+ }
+ SubpassSampleLocationsEXT& setSubpassIndex( uint32_t subpassIndex_ )
+ {
+ subpassIndex = subpassIndex_;
+ return *this;
+ }
+
+ SubpassSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
+ {
+ sampleLocationsInfo = sampleLocationsInfo_;
+ return *this;
+ }
+
+ operator const VkSubpassSampleLocationsEXT&() const
+ {
+ return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>(this);
+ }
+
+ bool operator==( SubpassSampleLocationsEXT const& rhs ) const
+ {
+ return ( subpassIndex == rhs.subpassIndex )
+ && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+ }
+
+ bool operator!=( SubpassSampleLocationsEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t subpassIndex;
+ SampleLocationsInfoEXT sampleLocationsInfo;
+ };
+ static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+
+ struct RenderPassSampleLocationsBeginInfoEXT
+ {
+ RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0, const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr, uint32_t postSubpassSampleLocationsCount_ = 0, const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr )
+ : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
+ , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
+ , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
+ , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+ {
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
+ return *this;
+ }
+ RenderPassSampleLocationsBeginInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT& setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ )
+ {
+ attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
+ return *this;
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT& setPAttachmentInitialSampleLocations( const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ )
+ {
+ pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
+ return *this;
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT& setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ )
+ {
+ postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
+ return *this;
+ }
+
+ RenderPassSampleLocationsBeginInfoEXT& setPPostSubpassSampleLocations( const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ )
+ {
+ pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
+ return *this;
+ }
+
+ operator const VkRenderPassSampleLocationsBeginInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>(this);
+ }
+
+ bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
+ && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
+ && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
+ && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+ }
+
+ bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t attachmentInitialSampleLocationsCount;
+ const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
+ uint32_t postSubpassSampleLocationsCount;
+ const SubpassSampleLocationsEXT* pPostSubpassSampleLocations;
+ };
+ static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+
+ struct PipelineSampleLocationsStateCreateInfoEXT
+ {
+ PipelineSampleLocationsStateCreateInfoEXT( Bool32 sampleLocationsEnable_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
+ : sampleLocationsEnable( sampleLocationsEnable_ )
+ , sampleLocationsInfo( sampleLocationsInfo_ )
+ {
+ }
+
+ PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
+ }
+
+ PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
+ return *this;
+ }
+ PipelineSampleLocationsStateCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsEnable( Bool32 sampleLocationsEnable_ )
+ {
+ sampleLocationsEnable = sampleLocationsEnable_;
+ return *this;
+ }
+
+ PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
+ {
+ sampleLocationsInfo = sampleLocationsInfo_;
+ return *this;
+ }
+
+ operator const VkPipelineSampleLocationsStateCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>(this);
+ }
+
+ bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
+ && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+ }
+
+ bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 sampleLocationsEnable;
+ SampleLocationsInfoEXT sampleLocationsInfo;
+ };
+ static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
+
+ struct PhysicalDeviceSampleLocationsPropertiesEXT
+ {
+ operator const VkPhysicalDeviceSampleLocationsPropertiesEXT&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
+ && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
+ && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 )
+ && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
+ && ( variableSampleLocations == rhs.variableSampleLocations );
+ }
+
+ bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+
+ public:
+ void* pNext = nullptr;
+ SampleCountFlags sampleLocationSampleCounts;
+ Extent2D maxSampleLocationGridSize;
+ float sampleLocationCoordinateRange[2];
+ uint32_t sampleLocationSubPixelBits;
+ Bool32 variableSampleLocations;
+ };
+ static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
+
+ enum class AttachmentDescriptionFlagBits
+ {
+ eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+ };
+
+ using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
+
+ VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
+ {
+ return AttachmentDescriptionFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
+ {
+ return ~( AttachmentDescriptionFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+ };
+ };
+
+ struct AttachmentDescription
+ {
+ AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
+ : flags( flags_ )
+ , format( format_ )
+ , samples( samples_ )
+ , loadOp( loadOp_ )
+ , storeOp( storeOp_ )
+ , stencilLoadOp( stencilLoadOp_ )
+ , stencilStoreOp( stencilStoreOp_ )
+ , initialLayout( initialLayout_ )
+ , finalLayout( finalLayout_ )
+ {
+ }
+
+ AttachmentDescription( VkAttachmentDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentDescription ) );
+ }
+
+ AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( AttachmentDescription ) );
+ return *this;
+ }
+ AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ AttachmentDescription& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
+ {
+ samples = samples_;
+ return *this;
+ }
+
+ AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
+ {
+ loadOp = loadOp_;
+ return *this;
+ }
+
+ AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
+ {
+ storeOp = storeOp_;
+ return *this;
+ }
+
+ AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
+ {
+ stencilLoadOp = stencilLoadOp_;
+ return *this;
+ }
+
+ AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
+ {
+ stencilStoreOp = stencilStoreOp_;
+ return *this;
+ }
+
+ AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
+ {
+ initialLayout = initialLayout_;
+ return *this;
+ }
+
+ AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
+ {
+ finalLayout = finalLayout_;
+ return *this;
+ }
+
+ operator const VkAttachmentDescription&() const
+ {
+ return *reinterpret_cast<const VkAttachmentDescription*>(this);
+ }
+
+ bool operator==( AttachmentDescription const& rhs ) const
+ {
+ return ( flags == rhs.flags )
+ && ( format == rhs.format )
+ && ( samples == rhs.samples )
+ && ( loadOp == rhs.loadOp )
+ && ( storeOp == rhs.storeOp )
+ && ( stencilLoadOp == rhs.stencilLoadOp )
+ && ( stencilStoreOp == rhs.stencilStoreOp )
+ && ( initialLayout == rhs.initialLayout )
+ && ( finalLayout == rhs.finalLayout );
+ }
+
+ bool operator!=( AttachmentDescription const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ AttachmentDescriptionFlags flags;
+ Format format;
+ SampleCountFlagBits samples;
+ AttachmentLoadOp loadOp;
+ AttachmentStoreOp storeOp;
+ AttachmentLoadOp stencilLoadOp;
+ AttachmentStoreOp stencilStoreOp;
+ ImageLayout initialLayout;
+ ImageLayout finalLayout;
+ };
+ static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
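+
+ // Illustrative usage sketch: a single-sampled color attachment that is cleared
+ // on load and transitioned for presentation at the end of the render pass. The
+ // swapchain format is an example assumption:
+ //
+ //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
+ //       .setFormat( vk::Format::eB8G8R8A8Unorm )
+ //       .setSamples( vk::SampleCountFlagBits::e1 )
+ //       .setLoadOp( vk::AttachmentLoadOp::eClear )
+ //       .setStoreOp( vk::AttachmentStoreOp::eStore )
+ //       .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
+ //       .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
+ //       .setInitialLayout( vk::ImageLayout::eUndefined )
+ //       .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );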
+
+ enum class StencilFaceFlagBits
+ {
+ eFront = VK_STENCIL_FACE_FRONT_BIT,
+ eBack = VK_STENCIL_FACE_BACK_BIT,
+ eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+ };
+
+ using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
+
+ VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
+ {
+ return StencilFaceFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
+ {
+ return ~( StencilFaceFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<StencilFaceFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
+ };
+ };
+
+ enum class DescriptorPoolCreateFlagBits
+ {
+ eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+ eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+ };
+
+ using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
+
+ VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
+ {
+ return DescriptorPoolCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
+ {
+ return ~( DescriptorPoolCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT)
+ };
+ };
+
+ struct DescriptorPoolCreateInfo
+ {
+ DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
+ : flags( flags_ )
+ , maxSets( maxSets_ )
+ , poolSizeCount( poolSizeCount_ )
+ , pPoolSizes( pPoolSizes_ )
+ {
+ }
+
+ DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
+ }
+
+ DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
+ return *this;
+ }
+ DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
+ {
+ maxSets = maxSets_;
+ return *this;
+ }
+
+ DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
+ {
+ poolSizeCount = poolSizeCount_;
+ return *this;
+ }
+
+ DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
+ {
+ pPoolSizes = pPoolSizes_;
+ return *this;
+ }
+
+ operator const VkDescriptorPoolCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
+ }
+
+ bool operator==( DescriptorPoolCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( maxSets == rhs.maxSets )
+ && ( poolSizeCount == rhs.poolSizeCount )
+ && ( pPoolSizes == rhs.pPoolSizes );
+ }
+
+ bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DescriptorPoolCreateFlags flags;
+ uint32_t maxSets;
+ uint32_t poolSizeCount;
+ const DescriptorPoolSize* pPoolSizes;
+ };
+ static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
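+
+  // Illustrative usage sketch (not generated code; `device` and `poolSizes` are
+  // assumed to be a vk::Device and an array of vk::DescriptorPoolSize): the
+  // fluent setters above let the create-info be filled in a chained style:
+  //   auto poolInfo = vk::DescriptorPoolCreateInfo()
+  //     .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
+  //     .setMaxSets( 16 )
+  //     .setPoolSizeCount( static_cast<uint32_t>( poolSizes.size() ) )
+  //     .setPPoolSizes( poolSizes.data() );
+  //   vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );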
+
+ enum class DependencyFlagBits
+ {
+ eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+ eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+ eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+ eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+ eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT
+ };
+
+ using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
+
+ VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
+ {
+ return DependencyFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
+ {
+ return ~( DependencyFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<DependencyFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
+ };
+ };
+
+ struct SubpassDependency
+ {
+ SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
+ : srcSubpass( srcSubpass_ )
+ , dstSubpass( dstSubpass_ )
+ , srcStageMask( srcStageMask_ )
+ , dstStageMask( dstStageMask_ )
+ , srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , dependencyFlags( dependencyFlags_ )
+ {
+ }
+
+ SubpassDependency( VkSubpassDependency const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassDependency ) );
+ }
+
+ SubpassDependency& operator=( VkSubpassDependency const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassDependency ) );
+ return *this;
+ }
+ SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
+ {
+ srcSubpass = srcSubpass_;
+ return *this;
+ }
+
+ SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
+ {
+ dstSubpass = dstSubpass_;
+ return *this;
+ }
+
+ SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
+ {
+ srcStageMask = srcStageMask_;
+ return *this;
+ }
+
+ SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
+ {
+ dependencyFlags = dependencyFlags_;
+ return *this;
+ }
+
+ operator const VkSubpassDependency&() const
+ {
+ return *reinterpret_cast<const VkSubpassDependency*>(this);
+ }
+
+ bool operator==( SubpassDependency const& rhs ) const
+ {
+ return ( srcSubpass == rhs.srcSubpass )
+ && ( dstSubpass == rhs.dstSubpass )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( dependencyFlags == rhs.dependencyFlags );
+ }
+
+ bool operator!=( SubpassDependency const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t srcSubpass;
+ uint32_t dstSubpass;
+ PipelineStageFlags srcStageMask;
+ PipelineStageFlags dstStageMask;
+ AccessFlags srcAccessMask;
+ AccessFlags dstAccessMask;
+ DependencyFlags dependencyFlags;
+ };
+ static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
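+
+  // Illustrative usage sketch (not generated code): a typical "external ->
+  // subpass 0" dependency for a color attachment, combining access bits with
+  // the flag operators defined earlier in this header:
+  //   vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL, 0,
+  //     vk::PipelineStageFlagBits::eColorAttachmentOutput,
+  //     vk::PipelineStageFlagBits::eColorAttachmentOutput,
+  //     vk::AccessFlags(),
+  //     vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eColorAttachmentWrite,
+  //     vk::DependencyFlagBits::eByRegion );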
+
+ enum class PresentModeKHR
+ {
+ eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+ eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+ eFifo = VK_PRESENT_MODE_FIFO_KHR,
+ eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+ eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+ eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+ };
+
+ enum class ColorSpaceKHR
+ {
+ eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+ eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+ eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+ eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
+ eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+ eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+ eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+ eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+ eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+ eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+ eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+ eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+ eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+ ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+ eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT
+ };
+
+ struct SurfaceFormatKHR
+ {
+ operator const VkSurfaceFormatKHR&() const
+ {
+ return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
+ }
+
+ bool operator==( SurfaceFormatKHR const& rhs ) const
+ {
+ return ( format == rhs.format )
+ && ( colorSpace == rhs.colorSpace );
+ }
+
+ bool operator!=( SurfaceFormatKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ Format format;
+ ColorSpaceKHR colorSpace;
+ };
+ static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+
+ struct SurfaceFormat2KHR
+ {
+ operator const VkSurfaceFormat2KHR&() const
+ {
+ return *reinterpret_cast<const VkSurfaceFormat2KHR*>(this);
+ }
+
+ bool operator==( SurfaceFormat2KHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surfaceFormat == rhs.surfaceFormat );
+ }
+
+ bool operator!=( SurfaceFormat2KHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSurfaceFormat2KHR;
+
+ public:
+ void* pNext = nullptr;
+ SurfaceFormatKHR surfaceFormat;
+ };
+ static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+
+ enum class DisplayPlaneAlphaFlagBitsKHR
+ {
+ eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+ eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+ ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+ ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+ };
+
+ using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+ VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
+ {
+ return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
+ {
+ return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+ };
+ };
+
+ struct DisplayPlaneCapabilitiesKHR
+ {
+ operator const VkDisplayPlaneCapabilitiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
+ }
+
+ bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
+ {
+ return ( supportedAlpha == rhs.supportedAlpha )
+ && ( minSrcPosition == rhs.minSrcPosition )
+ && ( maxSrcPosition == rhs.maxSrcPosition )
+ && ( minSrcExtent == rhs.minSrcExtent )
+ && ( maxSrcExtent == rhs.maxSrcExtent )
+ && ( minDstPosition == rhs.minDstPosition )
+ && ( maxDstPosition == rhs.maxDstPosition )
+ && ( minDstExtent == rhs.minDstExtent )
+ && ( maxDstExtent == rhs.maxDstExtent );
+ }
+
+ bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DisplayPlaneAlphaFlagsKHR supportedAlpha;
+ Offset2D minSrcPosition;
+ Offset2D maxSrcPosition;
+ Extent2D minSrcExtent;
+ Extent2D maxSrcExtent;
+ Offset2D minDstPosition;
+ Offset2D maxDstPosition;
+ Extent2D minDstExtent;
+ Extent2D maxDstExtent;
+ };
+ static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+ enum class CompositeAlphaFlagBitsKHR
+ {
+ eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+ ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+ ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+ eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+ };
+
+ using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+ VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
+ {
+ return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
+ {
+ return ~( CompositeAlphaFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+ };
+ };
+
+ enum class SurfaceTransformFlagBitsKHR
+ {
+ eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+ eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+ eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+ eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+ eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+ eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+ eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+ eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+ eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+ };
+
+ using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+ VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
+ {
+ return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
+ {
+ return ~( SurfaceTransformFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+ };
+ };
+
+ struct DisplayPropertiesKHR
+ {
+ operator const VkDisplayPropertiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+ }
+
+ bool operator==( DisplayPropertiesKHR const& rhs ) const
+ {
+ return ( display == rhs.display )
+ && ( displayName == rhs.displayName )
+ && ( physicalDimensions == rhs.physicalDimensions )
+ && ( physicalResolution == rhs.physicalResolution )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( planeReorderPossible == rhs.planeReorderPossible )
+ && ( persistentContent == rhs.persistentContent );
+ }
+
+ bool operator!=( DisplayPropertiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ DisplayKHR display;
+ const char* displayName;
+ Extent2D physicalDimensions;
+ Extent2D physicalResolution;
+ SurfaceTransformFlagsKHR supportedTransforms;
+ Bool32 planeReorderPossible;
+ Bool32 persistentContent;
+ };
+ static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+
+ struct DisplaySurfaceCreateInfoKHR
+ {
+ DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
+ : flags( flags_ )
+ , displayMode( displayMode_ )
+ , planeIndex( planeIndex_ )
+ , planeStackIndex( planeStackIndex_ )
+ , transform( transform_ )
+ , globalAlpha( globalAlpha_ )
+ , alphaMode( alphaMode_ )
+ , imageExtent( imageExtent_ )
+ {
+ }
+
+ DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
+ }
+
+ DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
+ return *this;
+ }
+ DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+ {
+ displayMode = displayMode_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+ {
+ planeIndex = planeIndex_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+ {
+ planeStackIndex = planeStackIndex_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+ {
+ transform = transform_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+ {
+ globalAlpha = globalAlpha_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+ {
+ alphaMode = alphaMode_;
+ return *this;
+ }
+
+ DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+
+ operator const VkDisplaySurfaceCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+ }
+
+ bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( displayMode == rhs.displayMode )
+ && ( planeIndex == rhs.planeIndex )
+ && ( planeStackIndex == rhs.planeStackIndex )
+ && ( transform == rhs.transform )
+ && ( globalAlpha == rhs.globalAlpha )
+ && ( alphaMode == rhs.alphaMode )
+ && ( imageExtent == rhs.imageExtent );
+ }
+
+ bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ DisplaySurfaceCreateFlagsKHR flags;
+ DisplayModeKHR displayMode;
+ uint32_t planeIndex;
+ uint32_t planeStackIndex;
+ SurfaceTransformFlagBitsKHR transform;
+ float globalAlpha;
+ DisplayPlaneAlphaFlagBitsKHR alphaMode;
+ Extent2D imageExtent;
+ };
+ static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+
+ struct SurfaceCapabilitiesKHR
+ {
+ operator const VkSurfaceCapabilitiesKHR&() const
+ {
+ return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+ }
+
+ bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+ {
+ return ( minImageCount == rhs.minImageCount )
+ && ( maxImageCount == rhs.maxImageCount )
+ && ( currentExtent == rhs.currentExtent )
+ && ( minImageExtent == rhs.minImageExtent )
+ && ( maxImageExtent == rhs.maxImageExtent )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( currentTransform == rhs.currentTransform )
+ && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+ && ( supportedUsageFlags == rhs.supportedUsageFlags );
+ }
+
+ bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ uint32_t minImageCount;
+ uint32_t maxImageCount;
+ Extent2D currentExtent;
+ Extent2D minImageExtent;
+ Extent2D maxImageExtent;
+ uint32_t maxImageArrayLayers;
+ SurfaceTransformFlagsKHR supportedTransforms;
+ SurfaceTransformFlagBitsKHR currentTransform;
+ CompositeAlphaFlagsKHR supportedCompositeAlpha;
+ ImageUsageFlags supportedUsageFlags;
+ };
+ static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
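+
+  // Illustrative usage sketch (not generated code; `caps` is an assumed
+  // vk::SurfaceCapabilitiesKHR): a common way to choose a swapchain image
+  // count from these fields, where maxImageCount == 0 means "no upper limit":
+  //   uint32_t imageCount = caps.minImageCount + 1;
+  //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
+  //   {
+  //     imageCount = caps.maxImageCount;
+  //   }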
+
+ struct SurfaceCapabilities2KHR
+ {
+ operator const VkSurfaceCapabilities2KHR&() const
+ {
+ return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>(this);
+ }
+
+ bool operator==( SurfaceCapabilities2KHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surfaceCapabilities == rhs.surfaceCapabilities );
+ }
+
+ bool operator!=( SurfaceCapabilities2KHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+
+ public:
+ void* pNext = nullptr;
+ SurfaceCapabilitiesKHR surfaceCapabilities;
+ };
+ static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+
+ enum class DebugReportFlagBitsEXT
+ {
+ eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+ eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+ };
+
+ using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+ VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
+ {
+ return DebugReportFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
+ {
+ return ~( DebugReportFlagsEXT( bits ) );
+ }
+
+ template <> struct FlagTraits<DebugReportFlagBitsEXT>
+ {
+ enum
+ {
+ allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+ };
+ };
+
+ struct DebugReportCallbackCreateInfoEXT
+ {
+ DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
+ : flags( flags_ )
+ , pfnCallback( pfnCallback_ )
+ , pUserData( pUserData_ )
+ {
+ }
+
+ DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
+ }
+
+ DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
+ return *this;
+ }
+ DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+ {
+ pfnCallback = pfnCallback_;
+ return *this;
+ }
+
+ DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+ {
+ pUserData = pUserData_;
+ return *this;
+ }
+
+ operator const VkDebugReportCallbackCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+ }
+
+ bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( pfnCallback == rhs.pfnCallback )
+ && ( pUserData == rhs.pUserData );
+ }
+
+ bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DebugReportFlagsEXT flags;
+ PFN_vkDebugReportCallbackEXT pfnCallback;
+ void* pUserData;
+ };
+ static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
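+
+  // Illustrative usage sketch (not generated code): a minimal callback with the
+  // PFN_vkDebugReportCallbackEXT signature that could be referenced by the
+  // create-info above (VK_EXT_debug_report must be enabled on the instance):
+  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
+  //     VkDebugReportFlagsEXT /*flags*/, VkDebugReportObjectTypeEXT /*objectType*/,
+  //     uint64_t /*object*/, size_t /*location*/, int32_t /*messageCode*/,
+  //     const char* pLayerPrefix, const char* pMessage, void* /*pUserData*/ )
+  //   {
+  //     printf( "[%s] %s\n", pLayerPrefix, pMessage );
+  //     return VK_FALSE;  // returning VK_FALSE does not abort the triggering call
+  //   }
+  //   vk::DebugReportCallbackCreateInfoEXT createInfo(
+  //     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, debugCallback );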
+
+ enum class DebugReportObjectTypeEXT
+ {
+ eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+ eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+ eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+ eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+ eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+ ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+ ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+ eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+ eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+ eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+ eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+ eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ eDebugReportCallbackExt = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+ eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+ eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+ eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
+ eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
+ eValidationCacheExt = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+ eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+ eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+ eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+ eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT
+ };
+
+ struct DebugMarkerObjectNameInfoEXT
+ {
+ DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
+ : objectType( objectType_ )
+ , object( object_ )
+ , pObjectName( pObjectName_ )
+ {
+ }
+
+ DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
+ }
+
+ DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
+ return *this;
+ }
+ DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+ {
+ objectType = objectType_;
+ return *this;
+ }
+
+ DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+ {
+ object = object_;
+ return *this;
+ }
+
+ DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+ {
+ pObjectName = pObjectName_;
+ return *this;
+ }
+
+ operator const VkDebugMarkerObjectNameInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+ }
+
+ bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectType == rhs.objectType )
+ && ( object == rhs.object )
+ && ( pObjectName == rhs.pObjectName );
+ }
+
+ bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DebugReportObjectTypeEXT objectType;
+ uint64_t object;
+ const char* pObjectName;
+ };
+ static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
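+
+  // Illustrative usage sketch (not generated code; `device` is an assumed
+  // vk::Device with VK_EXT_debug_marker enabled, and `objectHandle` the assumed
+  // uint64_t value of the buffer being named): attaching a human-readable name
+  // that debuggers such as RenderDoc can display:
+  //   vk::DebugMarkerObjectNameInfoEXT nameInfo(
+  //     vk::DebugReportObjectTypeEXT::eBuffer, objectHandle, "staging buffer" );
+  //   device.debugMarkerSetObjectNameEXT( nameInfo );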
+
+ struct DebugMarkerObjectTagInfoEXT
+ {
+ DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
+ : objectType( objectType_ )
+ , object( object_ )
+ , tagName( tagName_ )
+ , tagSize( tagSize_ )
+ , pTag( pTag_ )
+ {
+ }
+
+ DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
+ }
+
+ DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
+ return *this;
+ }
+ DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+ {
+ objectType = objectType_;
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+ {
+ object = object_;
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+ {
+ tagName = tagName_;
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+ {
+ tagSize = tagSize_;
+ return *this;
+ }
+
+ DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+ {
+ pTag = pTag_;
+ return *this;
+ }
+
+ operator const VkDebugMarkerObjectTagInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+ }
+
+ bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectType == rhs.objectType )
+ && ( object == rhs.object )
+ && ( tagName == rhs.tagName )
+ && ( tagSize == rhs.tagSize )
+ && ( pTag == rhs.pTag );
+ }
+
+ bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DebugReportObjectTypeEXT objectType;
+ uint64_t object;
+ uint64_t tagName;
+ size_t tagSize;
+ const void* pTag;
+ };
+ static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+
+ enum class RasterizationOrderAMD
+ {
+ eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+ eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+ };
+
+ struct PipelineRasterizationStateRasterizationOrderAMD
+ {
+ PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+ : rasterizationOrder( rasterizationOrder_ )
+ {
+ }
+
+ PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+ }
+
+ PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+ return *this;
+ }
+ PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+ {
+ rasterizationOrder = rasterizationOrder_;
+ return *this;
+ }
+
+ operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+ {
+ return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+ }
+
+ bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( rasterizationOrder == rhs.rasterizationOrder );
+ }
+
+ bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+ public:
+ const void* pNext = nullptr;
+ RasterizationOrderAMD rasterizationOrder;
+ };
+ static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+
+ enum class ExternalMemoryHandleTypeFlagBitsNV
+ {
+ eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+ eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+ eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+ eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+ };
+
+ using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+ VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
+ {
+ return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
+ {
+ return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+ };
+ };
+
+ struct ExternalMemoryImageCreateInfoNV
+ {
+ ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+ }
+
+ ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+ return *this;
+ }
+ ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExternalMemoryImageCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
+ }
+
+ bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagsNV handleTypes;
+ };
+ static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+ struct ExportMemoryAllocateInfoNV
+ {
+ ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
+ }
+
+ ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
+ return *this;
+ }
+ ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExportMemoryAllocateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
+ }
+
+ bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagsNV handleTypes;
+ };
+ static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
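+
+  // Illustrative usage sketch (not generated code; `device`, `size` and
+  // `memoryTypeIndex` are assumed): ExportMemoryAllocateInfoNV is consumed via
+  // the pNext chain of a MemoryAllocateInfo rather than passed on its own:
+  //   vk::ExportMemoryAllocateInfoNV exportInfo( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );
+  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
+  //   allocInfo.setPNext( &exportInfo );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );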
+
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ struct ImportMemoryWin32HandleInfoNV
+ {
+ ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
+ : handleType( handleType_ )
+ , handle( handle_ )
+ {
+ }
+
+ ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+ }
+
+ ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+ return *this;
+ }
+ ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ operator const VkImportMemoryWin32HandleInfoNV&() const
+ {
+ return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+ }
+
+ bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle );
+ }
+
+ bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagsNV handleType;
+ HANDLE handle;
+ };
+ static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+ enum class ExternalMemoryFeatureFlagBitsNV
+ {
+ eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+ eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+ eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+ };
+
+ using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+ VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
+ {
+ return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
+ {
+ return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+ };
+ };
+
+ struct ExternalImageFormatPropertiesNV
+ {
+ operator const VkExternalImageFormatPropertiesNV&() const
+ {
+ return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
+ }
+
+ bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
+ {
+ return ( imageFormatProperties == rhs.imageFormatProperties )
+ && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ }
+
+ bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ImageFormatProperties imageFormatProperties;
+ ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
+ ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
+ ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+ };
+ static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+
+ enum class ValidationCheckEXT
+ {
+ eAll = VK_VALIDATION_CHECK_ALL_EXT,
+ eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+ };
+
+ struct ValidationFlagsEXT
+ {
+ ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
+ : disabledValidationCheckCount( disabledValidationCheckCount_ )
+ , pDisabledValidationChecks( pDisabledValidationChecks_ )
+ {
+ }
+
+ ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
+ }
+
+ ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
+ return *this;
+ }
+ ValidationFlagsEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
+ {
+ disabledValidationCheckCount = disabledValidationCheckCount_;
+ return *this;
+ }
+
+ ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+ {
+ pDisabledValidationChecks = pDisabledValidationChecks_;
+ return *this;
+ }
+
+ operator const VkValidationFlagsEXT&() const
+ {
+ return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+ }
+
+ bool operator==( ValidationFlagsEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+ && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+ }
+
+ bool operator!=( ValidationFlagsEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eValidationFlagsEXT;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t disabledValidationCheckCount;
+ ValidationCheckEXT* pDisabledValidationChecks;
+ };
+ static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+
+ enum class SubgroupFeatureFlagBits
+ {
+ eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+ eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+ eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+ eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+ eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+ eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+ eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+ eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+ ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+ };
+
+ using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
+
+ VULKAN_HPP_INLINE SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 )
+ {
+ return SubgroupFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits )
+ {
+ return ~( SubgroupFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SubgroupFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
+ };
+ };
+
+ struct PhysicalDeviceSubgroupProperties
+ {
+ operator const VkPhysicalDeviceSubgroupProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( subgroupSize == rhs.subgroupSize )
+ && ( supportedStages == rhs.supportedStages )
+ && ( supportedOperations == rhs.supportedOperations )
+ && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+ }
+
+ bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t subgroupSize;
+ ShaderStageFlags supportedStages;
+ SubgroupFeatureFlags supportedOperations;
+ Bool32 quadOperationsInAllStages;
+ };
+ static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
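+
+  // Illustrative usage sketch (not generated code; `physicalDevice` is an
+  // assumed vk::PhysicalDevice, and the templated getProperties2 overload
+  // returning a StructureChain is assumed to be available in this header):
+  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                              vk::PhysicalDeviceSubgroupProperties>();
+  //   uint32_t subgroupSize = chain.get<vk::PhysicalDeviceSubgroupProperties>().subgroupSize;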
+
+ enum class IndirectCommandsLayoutUsageFlagBitsNVX
+ {
+ eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+ eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+ eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+ eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+ };
+
+ using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
+
+ VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
+ {
+ return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
+ {
+ return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+ }
+
+ template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+ {
+ enum
+ {
+ allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+ };
+ };
+
+ enum class ObjectEntryUsageFlagBitsNVX
+ {
+ eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+ eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+ };
+
+ using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
+
+ VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
+ {
+ return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
+ {
+ return ~( ObjectEntryUsageFlagsNVX( bits ) );
+ }
+
+ template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
+ {
+ enum
+ {
+ allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
+ };
+ };
+
+ enum class IndirectCommandsTokenTypeNVX
+ {
+ ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
+ eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX,
+ eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX,
+ eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX,
+ ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX,
+ eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX,
+ eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX,
+ eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX
+ };
+
+ struct IndirectCommandsTokenNVX
+ {
+ IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
+ : tokenType( tokenType_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ {
+ }
+
+ IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
+ }
+
+ IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
+ return *this;
+ }
+ IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+ {
+ tokenType = tokenType_;
+ return *this;
+ }
+
+ IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ operator const VkIndirectCommandsTokenNVX&() const
+ {
+ return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
+ }
+
+ bool operator==( IndirectCommandsTokenNVX const& rhs ) const
+ {
+ return ( tokenType == rhs.tokenType )
+ && ( buffer == rhs.buffer )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ IndirectCommandsTokenTypeNVX tokenType;
+ Buffer buffer;
+ DeviceSize offset;
+ };
+ static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+
+ struct IndirectCommandsLayoutTokenNVX
+ {
+ IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
+ : tokenType( tokenType_ )
+ , bindingUnit( bindingUnit_ )
+ , dynamicCount( dynamicCount_ )
+ , divisor( divisor_ )
+ {
+ }
+
+ IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
+ }
+
+ IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
+ return *this;
+ }
+ IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+ {
+ tokenType = tokenType_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
+ {
+ bindingUnit = bindingUnit_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
+ {
+ dynamicCount = dynamicCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
+ {
+ divisor = divisor_;
+ return *this;
+ }
+
+ operator const VkIndirectCommandsLayoutTokenNVX&() const
+ {
+ return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
+ }
+
+ bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
+ {
+ return ( tokenType == rhs.tokenType )
+ && ( bindingUnit == rhs.bindingUnit )
+ && ( dynamicCount == rhs.dynamicCount )
+ && ( divisor == rhs.divisor );
+ }
+
+ bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ IndirectCommandsTokenTypeNVX tokenType;
+ uint32_t bindingUnit;
+ uint32_t dynamicCount;
+ uint32_t divisor;
+ };
+ static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
+
+ struct IndirectCommandsLayoutCreateInfoNVX
+ {
+ IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
+ : pipelineBindPoint( pipelineBindPoint_ )
+ , flags( flags_ )
+ , tokenCount( tokenCount_ )
+ , pTokens( pTokens_ )
+ {
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
+ return *this;
+ }
+ IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
+ {
+ tokenCount = tokenCount_;
+ return *this;
+ }
+
+ IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
+ {
+ pTokens = pTokens_;
+ return *this;
+ }
+
+ operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
+ {
+ return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
+ }
+
+ bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( flags == rhs.flags )
+ && ( tokenCount == rhs.tokenCount )
+ && ( pTokens == rhs.pTokens );
+ }
+
+ bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineBindPoint pipelineBindPoint;
+ IndirectCommandsLayoutUsageFlagsNVX flags;
+ uint32_t tokenCount;
+ const IndirectCommandsLayoutTokenNVX* pTokens;
+ };
+ static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
+
+ enum class ObjectEntryTypeNVX
+ {
+ eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
+ ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX,
+ eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX,
+ eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX,
+ ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX
+ };
+
+ struct ObjectTableCreateInfoNVX
+ {
+ ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
+ : objectCount( objectCount_ )
+ , pObjectEntryTypes( pObjectEntryTypes_ )
+ , pObjectEntryCounts( pObjectEntryCounts_ )
+ , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+ , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+ , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+ , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+ , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+ , maxPipelineLayouts( maxPipelineLayouts_ )
+ {
+ }
+
+ ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
+ }
+
+ ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
+ return *this;
+ }
+ ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
+ {
+ objectCount = objectCount_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
+ {
+ pObjectEntryTypes = pObjectEntryTypes_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
+ {
+ pObjectEntryCounts = pObjectEntryCounts_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
+ {
+ pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
+ {
+ maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
+ {
+ maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
+ {
+ maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
+ {
+ maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+ return *this;
+ }
+
+ ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
+ {
+ maxPipelineLayouts = maxPipelineLayouts_;
+ return *this;
+ }
+
+ operator const VkObjectTableCreateInfoNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
+ }
+
+ bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectCount == rhs.objectCount )
+ && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
+ && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
+ && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
+ && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
+ && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
+ && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
+ && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
+ && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
+ }
+
+ bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eObjectTableCreateInfoNVX;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t objectCount;
+ const ObjectEntryTypeNVX* pObjectEntryTypes;
+ const uint32_t* pObjectEntryCounts;
+ const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+ uint32_t maxUniformBuffersPerDescriptor;
+ uint32_t maxStorageBuffersPerDescriptor;
+ uint32_t maxStorageImagesPerDescriptor;
+ uint32_t maxSampledImagesPerDescriptor;
+ uint32_t maxPipelineLayouts;
+ };
+ static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
+
+ struct ObjectTableEntryNVX
+ {
+ ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
+ : type( type_ )
+ , flags( flags_ )
+ {
+ }
+
+ ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
+ }
+
+ ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
+ return *this;
+ }
+ ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ operator const VkObjectTableEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTableEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags );
+ }
+
+ bool operator!=( ObjectTableEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ };
+ static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
+
+ struct ObjectTablePipelineEntryNVX
+ {
+ ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipeline( pipeline_ )
+ {
+ }
+
+ ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
+ }
+
+ ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
+ return *this;
+ }
+ ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
+ {
+ pipeline = pipeline_;
+ return *this;
+ }
+
+ operator const VkObjectTablePipelineEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipeline == rhs.pipeline );
+ }
+
+ bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Pipeline pipeline;
+ };
+ static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+
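+ // Illustrative sketch (editorial addition): using the constructor shown above, a pipeline
+ // entry for later registration into an object table could be built as, e.g.,
+ //
+ //   vk::ObjectTablePipelineEntryNVX pipelineEntry( vk::ObjectEntryTypeNVX::ePipeline,
+ //                                                  vk::ObjectEntryUsageFlagBitsNVX::eGraphics,
+ //                                                  pipeline );  // `pipeline` is an assumed vk::Pipeline handle
+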
+ struct ObjectTableDescriptorSetEntryNVX
+ {
+ ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipelineLayout( pipelineLayout_ )
+ , descriptorSet( descriptorSet_ )
+ {
+ }
+
+ ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
+ }
+
+ ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
+ return *this;
+ }
+ ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
+ {
+ pipelineLayout = pipelineLayout_;
+ return *this;
+ }
+
+ ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
+ {
+ descriptorSet = descriptorSet_;
+ return *this;
+ }
+
+ operator const VkObjectTableDescriptorSetEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipelineLayout == rhs.pipelineLayout )
+ && ( descriptorSet == rhs.descriptorSet );
+ }
+
+ bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ PipelineLayout pipelineLayout;
+ DescriptorSet descriptorSet;
+ };
+ static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
+
+ struct ObjectTableVertexBufferEntryNVX
+ {
+ ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+ : type( type_ )
+ , flags( flags_ )
+ , buffer( buffer_ )
+ {
+ }
+
+ ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
+ }
+
+ ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
+ return *this;
+ }
+ ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ operator const VkObjectTableVertexBufferEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( buffer == rhs.buffer );
+ }
+
+ bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Buffer buffer;
+ };
+ static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+ struct ObjectTableIndexBufferEntryNVX
+ {
+ ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer(), IndexType indexType_ = IndexType::eUint16 )
+ : type( type_ )
+ , flags( flags_ )
+ , buffer( buffer_ )
+ , indexType( indexType_ )
+ {
+ }
+
+ ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
+ }
+
+ ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
+ return *this;
+ }
+ ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ )
+ {
+ indexType = indexType_;
+ return *this;
+ }
+
+ operator const VkObjectTableIndexBufferEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( buffer == rhs.buffer )
+ && ( indexType == rhs.indexType );
+ }
+
+ bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ Buffer buffer;
+ IndexType indexType;
+ };
+ static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+ struct ObjectTablePushConstantEntryNVX
+ {
+ ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
+ : type( type_ )
+ , flags( flags_ )
+ , pipelineLayout( pipelineLayout_ )
+ , stageFlags( stageFlags_ )
+ {
+ }
+
+ ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
+ }
+
+ ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
+ return *this;
+ }
+ ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
+ {
+ type = type_;
+ return *this;
+ }
+
+ ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
+ {
+ pipelineLayout = pipelineLayout_;
+ return *this;
+ }
+
+ ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
+ {
+ stageFlags = stageFlags_;
+ return *this;
+ }
+
+ operator const VkObjectTablePushConstantEntryNVX&() const
+ {
+ return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
+ }
+
+ bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
+ {
+ return ( type == rhs.type )
+ && ( flags == rhs.flags )
+ && ( pipelineLayout == rhs.pipelineLayout )
+ && ( stageFlags == rhs.stageFlags );
+ }
+
+ bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ObjectEntryTypeNVX type;
+ ObjectEntryUsageFlagsNVX flags;
+ PipelineLayout pipelineLayout;
+ ShaderStageFlags stageFlags;
+ };
+ static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
+
+ enum class DescriptorSetLayoutCreateFlagBits
+ {
+ ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+ eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+ };
+
+ using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
+
+ VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
+ {
+ return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits )
+ {
+ return ~( DescriptorSetLayoutCreateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT)
+ };
+ };
+
+ struct DescriptorSetLayoutCreateInfo
+ {
+ DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr )
+ : flags( flags_ )
+ , bindingCount( bindingCount_ )
+ , pBindings( pBindings_ )
+ {
+ }
+
+ DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
+ }
+
+ DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
+ return *this;
+ }
+ DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
+ {
+ bindingCount = bindingCount_;
+ return *this;
+ }
+
+ DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
+ {
+ pBindings = pBindings_;
+ return *this;
+ }
+
+ operator const VkDescriptorSetLayoutCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
+ }
+
+ bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( bindingCount == rhs.bindingCount )
+ && ( pBindings == rhs.pBindings );
+ }
+
+ bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ DescriptorSetLayoutCreateFlags flags;
+ uint32_t bindingCount;
+ const DescriptorSetLayoutBinding* pBindings;
+ };
+ static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+
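+ // Illustrative usage sketch (editorial addition, assuming a valid vk::Device `device`): a
+ // one-binding layout can be created through the wrapper roughly as
+ //
+ //   vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1,
+ //                                           vk::ShaderStageFlagBits::eVertex );
+ //   auto layoutInfo = vk::DescriptorSetLayoutCreateInfo()
+ //                         .setBindingCount( 1 )
+ //                         .setPBindings( &binding );
+ //   vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );
+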
+ enum class ExternalMemoryHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+ eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+ eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+ eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+ eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+ eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+ eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+ eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+ eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+ eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+ eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+ eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
+ };
+
+ using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
+
+ VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 )
+ {
+ return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits )
+ {
+ return ~( ExternalMemoryHandleTypeFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
+ };
+ };
+
+ using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
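+ // Illustrative sketch (editorial addition): the operator| defined above lets individual
+ // handle-type bits be combined into a mask, e.g.
+ //
+ //   vk::ExternalMemoryHandleTypeFlags exportTypes =
+ //       vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd | vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT;
+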
+ struct PhysicalDeviceExternalImageFormatInfo
+ {
+ PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+ : handleType( handleType_ )
+ {
+ }
+
+ PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
+ }
+
+ PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
+ return *this;
+ }
+ PhysicalDeviceExternalImageFormatInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalImageFormatInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceExternalImageFormatInfo&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>(this);
+ }
+
+ bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+
+ struct PhysicalDeviceExternalBufferInfo
+ {
+ PhysicalDeviceExternalBufferInfo( BufferCreateFlags flags_ = BufferCreateFlags(), BufferUsageFlags usage_ = BufferUsageFlags(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+ : flags( flags_ )
+ , usage( usage_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
+ }
+
+ PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
+ return *this;
+ }
+ PhysicalDeviceExternalBufferInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalBufferInfo& setFlags( BufferCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalBufferInfo& setUsage( BufferUsageFlags usage_ )
+ {
+ usage = usage_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalBufferInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceExternalBufferInfo&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>(this);
+ }
+
+ bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( usage == rhs.usage )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+
+ public:
+ const void* pNext = nullptr;
+ BufferCreateFlags flags;
+ BufferUsageFlags usage;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+
+ struct ExternalMemoryImageCreateInfo
+ {
+ ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
+ }
+
+ ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
+ return *this;
+ }
+ ExternalMemoryImageCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExternalMemoryImageCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExternalMemoryImageCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>(this);
+ }
+
+ bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlags handleTypes;
+ };
+ static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+
+ using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+
+ struct ExternalMemoryBufferCreateInfo
+ {
+ ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
+ }
+
+ ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
+ return *this;
+ }
+ ExternalMemoryBufferCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExternalMemoryBufferCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExternalMemoryBufferCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>(this);
+ }
+
+ bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlags handleTypes;
+ };
+ static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+
+ using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+
+ struct ExportMemoryAllocateInfo
+ {
+ ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) );
+ }
+
+ ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) );
+ return *this;
+ }
+ ExportMemoryAllocateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportMemoryAllocateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExportMemoryAllocateInfo&() const
+ {
+ return *reinterpret_cast<const VkExportMemoryAllocateInfo*>(this);
+ }
+
+ bool operator==( ExportMemoryAllocateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExportMemoryAllocateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportMemoryAllocateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlags handleTypes;
+ };
+ static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+
+ using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+
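+ // Illustrative usage sketch (editorial addition, assuming a valid vk::Device `device` and
+ // placeholder `allocationSize`/`memoryTypeIndex` values): ExportMemoryAllocateInfo is chained
+ // into a MemoryAllocateInfo through pNext to make the allocation exportable, e.g.
+ //
+ //   vk::ExportMemoryAllocateInfo exportAlloc( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+ //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
+ //   allocInfo.setPNext( &exportAlloc );
+ //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
+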
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportMemoryWin32HandleInfoKHR
+ {
+ ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+ : handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
+ {
+ }
+
+ ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+ }
+
+ ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ImportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ ImportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkImportMemoryWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ HANDLE handle;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct MemoryGetWin32HandleInfoKHR
+ {
+ MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+ : memory( memory_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
+ }
+
+ MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
+ return *this;
+ }
+ MemoryGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryGetWin32HandleInfoKHR& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ MemoryGetWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkMemoryGetWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceMemory memory;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ImportMemoryFdInfoKHR
+ {
+ ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 )
+ : handleType( handleType_ )
+ , fd( fd_ )
+ {
+ }
+
+ ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+ }
+
+ ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+ return *this;
+ }
+ ImportMemoryFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportMemoryFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportMemoryFdInfoKHR& setFd( int fd_ )
+ {
+ fd = fd_;
+ return *this;
+ }
+
+ operator const VkImportMemoryFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>(this);
+ }
+
+ bool operator==( ImportMemoryFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
+ }
+
+ bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ int fd;
+ };
+ static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+
+ struct MemoryGetFdInfoKHR
+ {
+ MemoryGetFdInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+ : memory( memory_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
+ }
+
+ MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
+ return *this;
+ }
+ MemoryGetFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryGetFdInfoKHR& setMemory( DeviceMemory memory_ )
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ MemoryGetFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkMemoryGetFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>(this);
+ }
+
+ bool operator==( MemoryGetFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( MemoryGetFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceMemory memory;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+
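+ // Illustrative sketch (editorial addition, assuming VK_KHR_external_memory_fd is enabled on
+ // `device` and reusing the exportable allocation from the sketch above): an opaque POSIX fd
+ // can be exported and re-imported into another allocation roughly as
+ //
+ //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+ //   int fd = device.getMemoryFdKHR( getFdInfo );
+ //
+ //   vk::ImportMemoryFdInfoKHR importFd( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
+ //   vk::MemoryAllocateInfo importAlloc( allocationSize, memoryTypeIndex );
+ //   importAlloc.setPNext( &importFd );
+ //   vk::DeviceMemory imported = device.allocateMemory( importAlloc );
+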
+ struct ImportMemoryHostPointerInfoEXT
+ {
+ ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = nullptr )
+ : handleType( handleType_ )
+ , pHostPointer( pHostPointer_ )
+ {
+ }
+
+ ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
+ }
+
+ ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
+ return *this;
+ }
+ ImportMemoryHostPointerInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportMemoryHostPointerInfoEXT& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportMemoryHostPointerInfoEXT& setPHostPointer( void* pHostPointer_ )
+ {
+ pHostPointer = pHostPointer_;
+ return *this;
+ }
+
+ operator const VkImportMemoryHostPointerInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>(this);
+ }
+
+ bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType )
+ && ( pHostPointer == rhs.pHostPointer );
+ }
+
+ bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalMemoryHandleTypeFlagBits handleType;
+ void* pHostPointer;
+ };
+ static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+
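+ // Illustrative sketch (editorial addition, assuming VK_EXT_external_memory_host and a suitably
+ // aligned host allocation `alignedHostPtr`): this structure is likewise chained into a
+ // MemoryAllocateInfo via pNext, e.g.
+ //
+ //   vk::ImportMemoryHostPointerInfoEXT hostImport(
+ //       vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, alignedHostPtr );
+ //   allocInfo.setPNext( &hostImport );
+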
+ enum class ExternalMemoryFeatureFlagBits
+ {
+ eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+ eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+ eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+ eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
+ eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+ };
+
+ using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
+
+ VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 )
+ {
+ return ExternalMemoryFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits )
+ {
+ return ~( ExternalMemoryFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
+ };
+ };
+
+ using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+ struct ExternalMemoryProperties
+ {
+ operator const VkExternalMemoryProperties&() const
+ {
+ return *reinterpret_cast<const VkExternalMemoryProperties*>(this);
+ }
+
+ bool operator==( ExternalMemoryProperties const& rhs ) const
+ {
+ return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ }
+
+ bool operator!=( ExternalMemoryProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ExternalMemoryFeatureFlags externalMemoryFeatures;
+ ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes;
+ ExternalMemoryHandleTypeFlags compatibleHandleTypes;
+ };
+ static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+
+ using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+
+ struct ExternalImageFormatProperties
+ {
+ operator const VkExternalImageFormatProperties&() const
+ {
+ return *reinterpret_cast<const VkExternalImageFormatProperties*>(this);
+ }
+
+ bool operator==( ExternalImageFormatProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( externalMemoryProperties == rhs.externalMemoryProperties );
+ }
+
+ bool operator!=( ExternalImageFormatProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalImageFormatProperties;
+
+ public:
+ void* pNext = nullptr;
+ ExternalMemoryProperties externalMemoryProperties;
+ };
+ static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+
+ using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+
+ struct ExternalBufferProperties
+ {
+ operator const VkExternalBufferProperties&() const
+ {
+ return *reinterpret_cast<const VkExternalBufferProperties*>(this);
+ }
+
+ bool operator==( ExternalBufferProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( externalMemoryProperties == rhs.externalMemoryProperties );
+ }
+
+ bool operator!=( ExternalBufferProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalBufferProperties;
+
+ public:
+ void* pNext = nullptr;
+ ExternalMemoryProperties externalMemoryProperties;
+ };
+ static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+
+ using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+
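+ // Illustrative usage sketch (editorial addition, assuming a valid vk::PhysicalDevice
+ // `physicalDevice`): external-buffer support for a given usage/handle-type combination can be
+ // queried roughly as
+ //
+ //   vk::PhysicalDeviceExternalBufferInfo bufferInfo( {}, vk::BufferUsageFlagBits::eTransferSrc,
+ //                                                    vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+ //   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( bufferInfo );
+ //   if ( props.externalMemoryProperties.externalMemoryFeatures & vk::ExternalMemoryFeatureFlagBits::eExportable )
+ //   {
+ //     // this usage/handle-type combination can be exported
+ //   }
+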
+ enum class ExternalSemaphoreHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+ eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+ eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+ eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT
+ };
+
+ using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
+
+ VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 )
+ {
+ return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits )
+ {
+ return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
+ };
+ };
+
+ using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+ struct PhysicalDeviceExternalSemaphoreInfo
+ {
+ PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+ : handleType( handleType_ )
+ {
+ }
+
+ PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
+ }
+
+ PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
+ return *this;
+ }
+ PhysicalDeviceExternalSemaphoreInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalSemaphoreInfo& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceExternalSemaphoreInfo&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>(this);
+ }
+
+ bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalSemaphoreHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+
+ struct ExportSemaphoreCreateInfo
+ {
+ ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags handleTypes_ = ExternalSemaphoreHandleTypeFlags() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) );
+ }
+
+ ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) );
+ return *this;
+ }
+ ExportSemaphoreCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportSemaphoreCreateInfo& setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExportSemaphoreCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>(this);
+ }
+
+ bool operator==( ExportSemaphoreCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalSemaphoreHandleTypeFlags handleTypes;
+ };
+ static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+
+ using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct SemaphoreGetWin32HandleInfoKHR
+ {
+ SemaphoreGetWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+ : semaphore( semaphore_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+ }
+
+ SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+ return *this;
+ }
+ SemaphoreGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SemaphoreGetWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ SemaphoreGetWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkSemaphoreGetWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Semaphore semaphore;
+ ExternalSemaphoreHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct SemaphoreGetFdInfoKHR
+ {
+ SemaphoreGetFdInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+ : semaphore( semaphore_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
+ }
+
+ SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
+ return *this;
+ }
+ SemaphoreGetFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SemaphoreGetFdInfoKHR& setSemaphore( Semaphore semaphore_ )
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ SemaphoreGetFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkSemaphoreGetFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>(this);
+ }
+
+ bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Semaphore semaphore;
+ ExternalSemaphoreHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+
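+ // Illustrative sketch (editorial addition, assuming VK_KHR_external_semaphore_fd is enabled
+ // and `semaphore` was created with a matching ExportSemaphoreCreateInfo): a semaphore payload
+ // can be exported as a file descriptor roughly as
+ //
+ //   vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
+ //   int semaphoreFd = device.getSemaphoreFdKHR( getFdInfo );
+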
+ enum class ExternalSemaphoreFeatureFlagBits
+ {
+ eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+ eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
+ eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+ };
+
+ using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
+
+ VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 )
+ {
+ return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits )
+ {
+ return ~( ExternalSemaphoreFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
+ };
+ };
+
+ using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+ struct ExternalSemaphoreProperties
+ {
+ operator const VkExternalSemaphoreProperties&() const
+ {
+ return *reinterpret_cast<const VkExternalSemaphoreProperties*>(this);
+ }
+
+ bool operator==( ExternalSemaphoreProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+ && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+ }
+
+ bool operator!=( ExternalSemaphoreProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalSemaphoreProperties;
+
+ public:
+ void* pNext = nullptr;
+ ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
+ ExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
+ ExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
+ };
+ static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+
+ using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+
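+ // Illustrative sketch (editorial addition, assuming the enhanced query wrapper on
+ // vk::PhysicalDevice): whether a semaphore handle type can be exported or imported is queried
+ // per physical device, e.g.
+ //
+ //   vk::PhysicalDeviceExternalSemaphoreInfo semInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd );
+ //   vk::ExternalSemaphoreProperties semProps = physicalDevice.getExternalSemaphoreProperties( semInfo );
+ //   if ( semProps.externalSemaphoreFeatures & vk::ExternalSemaphoreFeatureFlagBits::eImportable )
+ //   {
+ //     // sync-fd import is supported
+ //   }
+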
+ enum class SemaphoreImportFlagBits
+ {
+ eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
+ eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+ };
+
+ using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits, VkSemaphoreImportFlags>;
+
+ VULKAN_HPP_INLINE SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 )
+ {
+ return SemaphoreImportFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
+ {
+ return ~( SemaphoreImportFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SemaphoreImportFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
+ };
+ };
+
+ using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportSemaphoreWin32HandleInfoKHR
+ {
+ ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlags flags_ = SemaphoreImportFlags(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+ : semaphore( semaphore_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
+ {
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ImportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& setFlags( SemaphoreImportFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ ImportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkImportSemaphoreWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Semaphore semaphore;
+ SemaphoreImportFlags flags;
+ ExternalSemaphoreHandleTypeFlagBits handleType;
+ HANDLE handle;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ImportSemaphoreFdInfoKHR
+ {
+ ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlags flags_ = SemaphoreImportFlags(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 )
+ : semaphore( semaphore_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , fd( fd_ )
+ {
+ }
+
+ ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+ }
+
+ ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+ return *this;
+ }
+ ImportSemaphoreFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR& setSemaphore( Semaphore semaphore_ )
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR& setFlags( SemaphoreImportFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportSemaphoreFdInfoKHR& setFd( int fd_ )
+ {
+ fd = fd_;
+ return *this;
+ }
+
+ operator const VkImportSemaphoreFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>(this);
+ }
+
+ bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
+ }
+
+ bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Semaphore semaphore;
+ SemaphoreImportFlags flags;
+ ExternalSemaphoreHandleTypeFlagBits handleType;
+ int fd;
+ };
+ static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+
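+ // Illustrative sketch (editorial addition, `otherSemaphore` and `syncFd` being assumed
+ // placeholders): a previously exported fd is imported into a semaphore with this structure;
+ // note that sync-fd payloads must be imported with the eTemporary flag, e.g.
+ //
+ //   vk::ImportSemaphoreFdInfoKHR importInfo( otherSemaphore, vk::SemaphoreImportFlagBits::eTemporary,
+ //                                            vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd, syncFd );
+ //   device.importSemaphoreFdKHR( importInfo );
+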
+ enum class ExternalFenceHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
+ eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+ };
+
+ using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
+
+ VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 )
+ {
+ return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits )
+ {
+ return ~( ExternalFenceHandleTypeFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
+ };
+ };
+
+ using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+ struct PhysicalDeviceExternalFenceInfo
+ {
+ PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
+ : handleType( handleType_ )
+ {
+ }
+
+ PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
+ }
+
+ PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
+ return *this;
+ }
+ PhysicalDeviceExternalFenceInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceExternalFenceInfo& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkPhysicalDeviceExternalFenceInfo&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>(this);
+ }
+
+ bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalFenceHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+
+ struct ExportFenceCreateInfo
+ {
+ ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = ExternalFenceHandleTypeFlags() )
+ : handleTypes( handleTypes_ )
+ {
+ }
+
+ ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) );
+ }
+
+ ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) );
+ return *this;
+ }
+ ExportFenceCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ExportFenceCreateInfo& setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ )
+ {
+ handleTypes = handleTypes_;
+ return *this;
+ }
+
+ operator const VkExportFenceCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkExportFenceCreateInfo*>(this);
+ }
+
+ bool operator==( ExportFenceCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( handleTypes == rhs.handleTypes );
+ }
+
+ bool operator!=( ExportFenceCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExportFenceCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ ExternalFenceHandleTypeFlags handleTypes;
+ };
+ static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+
+ using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
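+
+ // Illustrative comment: ExportFenceCreateInfo is chained into the pNext of a
+ // FenceCreateInfo so the resulting fence can later be exported. A hedged
+ // application-side sketch, assuming the enhanced-mode Device wrappers declared
+ // later in this header and an existing vk::Device `device`:
+ //
+ //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
+ //   vk::FenceCreateInfo fenceInfo;
+ //   fenceInfo.setPNext( &exportInfo );
+ //   vk::Fence fence = device.createFence( fenceInfo );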
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct FenceGetWin32HandleInfoKHR
+ {
+ FenceGetWin32HandleInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
+ : fence( fence_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+ }
+
+ FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+ return *this;
+ }
+ FenceGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ FenceGetWin32HandleInfoKHR& setFence( Fence fence_ )
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ FenceGetWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkFenceGetWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Fence fence;
+ ExternalFenceHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct FenceGetFdInfoKHR
+ {
+ FenceGetFdInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
+ : fence( fence_ )
+ , handleType( handleType_ )
+ {
+ }
+
+ FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
+ }
+
+ FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
+ return *this;
+ }
+ FenceGetFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ FenceGetFdInfoKHR& setFence( Fence fence_ )
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ FenceGetFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ operator const VkFenceGetFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkFenceGetFdInfoKHR*>(this);
+ }
+
+ bool operator==( FenceGetFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( handleType == rhs.handleType );
+ }
+
+ bool operator!=( FenceGetFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eFenceGetFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Fence fence;
+ ExternalFenceHandleTypeFlagBits handleType;
+ };
+ static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
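+
+ // Illustrative comment: FenceGetFdInfoKHR selects which handle type to export
+ // from a fence. A hedged sketch, assuming VK_KHR_external_fence_fd is enabled
+ // and using the Device::getFenceFdKHR wrapper declared elsewhere in this header:
+ //
+ //   vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
+ //   int fd = device.getFenceFdKHR( getFdInfo );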
+
+ enum class ExternalFenceFeatureFlagBits
+ {
+ eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+ eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
+ eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+ };
+
+ using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
+
+ VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 )
+ {
+ return ExternalFenceFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits )
+ {
+ return ~( ExternalFenceFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
+ };
+ };
+
+ using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+ struct ExternalFenceProperties
+ {
+ operator const VkExternalFenceProperties&() const
+ {
+ return *reinterpret_cast<const VkExternalFenceProperties*>(this);
+ }
+
+ bool operator==( ExternalFenceProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+ && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+ && ( externalFenceFeatures == rhs.externalFenceFeatures );
+ }
+
+ bool operator!=( ExternalFenceProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eExternalFenceProperties;
+
+ public:
+ void* pNext = nullptr;
+ ExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
+ ExternalFenceHandleTypeFlags compatibleHandleTypes;
+ ExternalFenceFeatureFlags externalFenceFeatures;
+ };
+ static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+
+ using ExternalFencePropertiesKHR = ExternalFenceProperties;
+
+ enum class FenceImportFlagBits
+ {
+ eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT,
+ eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT
+ };
+
+ using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
+
+ VULKAN_HPP_INLINE FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 )
+ {
+ return FenceImportFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE FenceImportFlags operator~( FenceImportFlagBits bits )
+ {
+ return ~( FenceImportFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<FenceImportFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(FenceImportFlagBits::eTemporary)
+ };
+ };
+
+ using FenceImportFlagsKHR = FenceImportFlags;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct ImportFenceWin32HandleInfoKHR
+ {
+ ImportFenceWin32HandleInfoKHR( Fence fence_ = Fence(), FenceImportFlags flags_ = FenceImportFlags(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+ : fence( fence_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
+ {
+ }
+
+ ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+ }
+
+ ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+ return *this;
+ }
+ ImportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR& setFence( Fence fence_ )
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR& setFlags( FenceImportFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+ {
+ handle = handle_;
+ return *this;
+ }
+
+ ImportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
+ {
+ name = name_;
+ return *this;
+ }
+
+ operator const VkImportFenceWin32HandleInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>(this);
+ }
+
+ bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( handle == rhs.handle )
+ && ( name == rhs.name );
+ }
+
+ bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Fence fence;
+ FenceImportFlags flags;
+ ExternalFenceHandleTypeFlagBits handleType;
+ HANDLE handle;
+ LPCWSTR name;
+ };
+ static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ struct ImportFenceFdInfoKHR
+ {
+ ImportFenceFdInfoKHR( Fence fence_ = Fence(), FenceImportFlags flags_ = FenceImportFlags(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 )
+ : fence( fence_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , fd( fd_ )
+ {
+ }
+
+ ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
+ }
+
+ ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
+ return *this;
+ }
+ ImportFenceFdInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR& setFence( Fence fence_ )
+ {
+ fence = fence_;
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR& setFlags( FenceImportFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
+ {
+ handleType = handleType_;
+ return *this;
+ }
+
+ ImportFenceFdInfoKHR& setFd( int fd_ )
+ {
+ fd = fd_;
+ return *this;
+ }
+
+ operator const VkImportFenceFdInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkImportFenceFdInfoKHR*>(this);
+ }
+
+ bool operator==( ImportFenceFdInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( fence == rhs.fence )
+ && ( flags == rhs.flags )
+ && ( handleType == rhs.handleType )
+ && ( fd == rhs.fd );
+ }
+
+ bool operator!=( ImportFenceFdInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eImportFenceFdInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ Fence fence;
+ FenceImportFlags flags;
+ ExternalFenceHandleTypeFlagBits handleType;
+ int fd;
+ };
+ static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
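+
+ // Illustrative comment: the import path mirrors the export path above. A hedged
+ // sketch, again assuming VK_KHR_external_fence_fd and the enhanced-mode
+ // Device::importFenceFdKHR wrapper; `fence` and `fd` come from application code:
+ //
+ //   vk::ImportFenceFdInfoKHR importInfo( fence,
+ //     vk::FenceImportFlagBits::eTemporary,
+ //     vk::ExternalFenceHandleTypeFlagBits::eSyncFd,
+ //     fd );
+ //   device.importFenceFdKHR( importInfo );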
+
+ enum class SurfaceCounterFlagBitsEXT
+ {
+ eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+ };
+
+ using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+ VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
+ {
+ return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
+ {
+ return ~( SurfaceCounterFlagsEXT( bits ) );
+ }
+
+ template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+ {
+ enum
+ {
+ allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+ };
+ };
+
+ struct SurfaceCapabilities2EXT
+ {
+ operator const VkSurfaceCapabilities2EXT&() const
+ {
+ return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
+ }
+
+ bool operator==( SurfaceCapabilities2EXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( minImageCount == rhs.minImageCount )
+ && ( maxImageCount == rhs.maxImageCount )
+ && ( currentExtent == rhs.currentExtent )
+ && ( minImageExtent == rhs.minImageExtent )
+ && ( maxImageExtent == rhs.maxImageExtent )
+ && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+ && ( supportedTransforms == rhs.supportedTransforms )
+ && ( currentTransform == rhs.currentTransform )
+ && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+ && ( supportedUsageFlags == rhs.supportedUsageFlags )
+ && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+ }
+
+ bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t minImageCount;
+ uint32_t maxImageCount;
+ Extent2D currentExtent;
+ Extent2D minImageExtent;
+ Extent2D maxImageExtent;
+ uint32_t maxImageArrayLayers;
+ SurfaceTransformFlagsKHR supportedTransforms;
+ SurfaceTransformFlagBitsKHR currentTransform;
+ CompositeAlphaFlagsKHR supportedCompositeAlpha;
+ ImageUsageFlags supportedUsageFlags;
+ SurfaceCounterFlagsEXT supportedSurfaceCounters;
+ };
+ static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+
+ struct SwapchainCounterCreateInfoEXT
+ {
+ SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() )
+ : surfaceCounters( surfaceCounters_ )
+ {
+ }
+
+ SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+ }
+
+ SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+ return *this;
+ }
+ SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ )
+ {
+ surfaceCounters = surfaceCounters_;
+ return *this;
+ }
+
+ operator const VkSwapchainCounterCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>(this);
+ }
+
+ bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( surfaceCounters == rhs.surfaceCounters );
+ }
+
+ bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ SurfaceCounterFlagsEXT surfaceCounters;
+ };
+ static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
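+
+ // Illustrative comment: SwapchainCounterCreateInfoEXT is chained into the pNext
+ // of a SwapchainCreateInfoKHR (defined later in this file) to request surface
+ // counters from VK_EXT_display_control. Hedged sketch, where `swapchainCreateInfo`
+ // is an application-owned vk::SwapchainCreateInfoKHR:
+ //
+ //   vk::SwapchainCounterCreateInfoEXT counterInfo( vk::SurfaceCounterFlagBitsEXT::eVblank );
+ //   swapchainCreateInfo.setPNext( &counterInfo );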
+
+ enum class DisplayPowerStateEXT
+ {
+ eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+ eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+ eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+ };
+
+ struct DisplayPowerInfoEXT
+ {
+ DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff )
+ : powerState( powerState_ )
+ {
+ }
+
+ DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
+ }
+
+ DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
+ return *this;
+ }
+ DisplayPowerInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ )
+ {
+ powerState = powerState_;
+ return *this;
+ }
+
+ operator const VkDisplayPowerInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDisplayPowerInfoEXT*>(this);
+ }
+
+ bool operator==( DisplayPowerInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( powerState == rhs.powerState );
+ }
+
+ bool operator!=( DisplayPowerInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDisplayPowerInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DisplayPowerStateEXT powerState;
+ };
+ static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
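+
+ // Illustrative comment: DisplayPowerInfoEXT is consumed by the
+ // Device::displayPowerControlEXT wrapper declared elsewhere in this header.
+ // Hedged sketch, assuming `display` is a vk::DisplayKHR obtained from the
+ // display enumeration API:
+ //
+ //   device.displayPowerControlEXT( display,
+ //     vk::DisplayPowerInfoEXT( vk::DisplayPowerStateEXT::eOff ) );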
+
+ enum class DeviceEventTypeEXT
+ {
+ eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+ };
+
+ struct DeviceEventInfoEXT
+ {
+ DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug )
+ : deviceEvent( deviceEvent_ )
+ {
+ }
+
+ DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
+ }
+
+ DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
+ return *this;
+ }
+ DeviceEventInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ )
+ {
+ deviceEvent = deviceEvent_;
+ return *this;
+ }
+
+ operator const VkDeviceEventInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDeviceEventInfoEXT*>(this);
+ }
+
+ bool operator==( DeviceEventInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( deviceEvent == rhs.deviceEvent );
+ }
+
+ bool operator!=( DeviceEventInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceEventInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceEventTypeEXT deviceEvent;
+ };
+ static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
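+
+ // Illustrative comment: DeviceEventInfoEXT is passed to the Device::registerEventEXT
+ // wrapper to obtain a fence that signals when the requested device event occurs.
+ // Hedged sketch (enhanced mode with exceptions assumed):
+ //
+ //   vk::Fence hotplugFence = device.registerEventEXT(
+ //     vk::DeviceEventInfoEXT( vk::DeviceEventTypeEXT::eDisplayHotplug ) );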
+
+ enum class DisplayEventTypeEXT
+ {
+ eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+ };
+
+ struct DisplayEventInfoEXT
+ {
+ DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut )
+ : displayEvent( displayEvent_ )
+ {
+ }
+
+ DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
+ }
+
+ DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
+ return *this;
+ }
+ DisplayEventInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ )
+ {
+ displayEvent = displayEvent_;
+ return *this;
+ }
+
+ operator const VkDisplayEventInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDisplayEventInfoEXT*>(this);
+ }
+
+ bool operator==( DisplayEventInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( displayEvent == rhs.displayEvent );
+ }
+
+ bool operator!=( DisplayEventInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDisplayEventInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DisplayEventTypeEXT displayEvent;
+ };
+ static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+
+ enum class PeerMemoryFeatureFlagBits
+ {
+ eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+ eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+ eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+ eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+ eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+ eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+ eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
+ eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
+ };
+
+ using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits, VkPeerMemoryFeatureFlags>;
+
+ VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 )
+ {
+ return PeerMemoryFeatureFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits )
+ {
+ return ~( PeerMemoryFeatureFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
+ };
+ };
+
+ using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+
+ enum class MemoryAllocateFlagBits
+ {
+ eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+ eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT
+ };
+
+ using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits, VkMemoryAllocateFlags>;
+
+ VULKAN_HPP_INLINE MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 )
+ {
+ return MemoryAllocateFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits )
+ {
+ return ~( MemoryAllocateFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<MemoryAllocateFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask)
+ };
+ };
+
+ using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+
+ struct MemoryAllocateFlagsInfo
+ {
+ MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = MemoryAllocateFlags(), uint32_t deviceMask_ = 0 )
+ : flags( flags_ )
+ , deviceMask( deviceMask_ )
+ {
+ }
+
+ MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) );
+ }
+
+ MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) );
+ return *this;
+ }
+ MemoryAllocateFlagsInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryAllocateFlagsInfo& setFlags( MemoryAllocateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ MemoryAllocateFlagsInfo& setDeviceMask( uint32_t deviceMask_ )
+ {
+ deviceMask = deviceMask_;
+ return *this;
+ }
+
+ operator const VkMemoryAllocateFlagsInfo&() const
+ {
+ return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>(this);
+ }
+
+ bool operator==( MemoryAllocateFlagsInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+
+ public:
+ const void* pNext = nullptr;
+ MemoryAllocateFlags flags;
+ uint32_t deviceMask;
+ };
+ static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
+
+ using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
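+
+ // Illustrative comment: MemoryAllocateFlagsInfo is chained into the pNext of a
+ // MemoryAllocateInfo to restrict a device-group allocation to a subset of
+ // physical devices. Hedged sketch; the device mask, `size` and `memoryTypeIndex`
+ // are purely hypothetical application values:
+ //
+ //   vk::MemoryAllocateFlagsInfo flagsInfo( vk::MemoryAllocateFlagBits::eDeviceMask, 0x1 );
+ //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
+ //   allocInfo.setPNext( &flagsInfo );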
+
+ enum class DeviceGroupPresentModeFlagBitsKHR
+ {
+ eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+ eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+ eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+ eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+ };
+
+ using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
+
+ VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 )
+ {
+ return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits )
+ {
+ return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
+ };
+ };
+
+ struct DeviceGroupPresentCapabilitiesKHR
+ {
+ operator const VkDeviceGroupPresentCapabilitiesKHR&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>(this);
+ }
+
+ bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
+ && ( modes == rhs.modes );
+ }
+
+ bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+ DeviceGroupPresentModeFlagsKHR modes;
+ };
+ static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+ struct DeviceGroupPresentInfoKHR
+ {
+ DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0, const uint32_t* pDeviceMasks_ = nullptr, DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal )
+ : swapchainCount( swapchainCount_ )
+ , pDeviceMasks( pDeviceMasks_ )
+ , mode( mode_ )
+ {
+ }
+
+ DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
+ }
+
+ DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
+ return *this;
+ }
+ DeviceGroupPresentInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupPresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+ {
+ swapchainCount = swapchainCount_;
+ return *this;
+ }
+
+ DeviceGroupPresentInfoKHR& setPDeviceMasks( const uint32_t* pDeviceMasks_ )
+ {
+ pDeviceMasks = pDeviceMasks_;
+ return *this;
+ }
+
+ DeviceGroupPresentInfoKHR& setMode( DeviceGroupPresentModeFlagBitsKHR mode_ )
+ {
+ mode = mode_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupPresentInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>(this);
+ }
+
+ bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( swapchainCount == rhs.swapchainCount )
+ && ( pDeviceMasks == rhs.pDeviceMasks )
+ && ( mode == rhs.mode );
+ }
+
+ bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t swapchainCount;
+ const uint32_t* pDeviceMasks;
+ DeviceGroupPresentModeFlagBitsKHR mode;
+ };
+ static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
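+
+ // Illustrative comment: DeviceGroupPresentInfoKHR is chained into the pNext of a
+ // PresentInfoKHR so each swapchain image is presented by a chosen set of devices
+ // in the group. Hedged sketch (one swapchain, device 0 presents; `presentInfo`
+ // is an application-owned vk::PresentInfoKHR):
+ //
+ //   uint32_t deviceMasks[1] = { 0x1 };
+ //   vk::DeviceGroupPresentInfoKHR groupPresent( 1, deviceMasks,
+ //     vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
+ //   presentInfo.setPNext( &groupPresent );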
+
+ struct DeviceGroupSwapchainCreateInfoKHR
+ {
+ DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = DeviceGroupPresentModeFlagsKHR() )
+ : modes( modes_ )
+ {
+ }
+
+ DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
+ }
+
+ DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
+ return *this;
+ }
+ DeviceGroupSwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupSwapchainCreateInfoKHR& setModes( DeviceGroupPresentModeFlagsKHR modes_ )
+ {
+ modes = modes_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupSwapchainCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>(this);
+ }
+
+ bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( modes == rhs.modes );
+ }
+
+ bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ DeviceGroupPresentModeFlagsKHR modes;
+ };
+ static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+
+ enum class SwapchainCreateFlagBitsKHR
+ {
+ eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+ eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR
+ };
+
+ using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
+
+ VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
+ {
+ return SwapchainCreateFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
+ {
+ return ~( SwapchainCreateFlagsKHR( bits ) );
+ }
+
+ template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+ {
+ enum
+ {
+ allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected)
+ };
+ };
+
+ struct SwapchainCreateInfoKHR
+ {
+ SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
+ : flags( flags_ )
+ , surface( surface_ )
+ , minImageCount( minImageCount_ )
+ , imageFormat( imageFormat_ )
+ , imageColorSpace( imageColorSpace_ )
+ , imageExtent( imageExtent_ )
+ , imageArrayLayers( imageArrayLayers_ )
+ , imageUsage( imageUsage_ )
+ , imageSharingMode( imageSharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ , preTransform( preTransform_ )
+ , compositeAlpha( compositeAlpha_ )
+ , presentMode( presentMode_ )
+ , clipped( clipped_ )
+ , oldSwapchain( oldSwapchain_ )
+ {
+ }
+
+ SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
+ }
+
+ SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
+ return *this;
+ }
+ SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+ {
+ surface = surface_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+ {
+ minImageCount = minImageCount_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+ {
+ imageFormat = imageFormat_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+ {
+ imageColorSpace = imageColorSpace_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+ {
+ imageArrayLayers = imageArrayLayers_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+ {
+ imageUsage = imageUsage_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+ {
+ imageSharingMode = imageSharingMode_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+ {
+ queueFamilyIndexCount = queueFamilyIndexCount_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+ {
+ pQueueFamilyIndices = pQueueFamilyIndices_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+ {
+ preTransform = preTransform_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+ {
+ compositeAlpha = compositeAlpha_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+ {
+ presentMode = presentMode_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+ {
+ clipped = clipped_;
+ return *this;
+ }
+
+ SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+ {
+ oldSwapchain = oldSwapchain_;
+ return *this;
+ }
+
+ operator const VkSwapchainCreateInfoKHR&() const
+ {
+ return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+ }
+
+ bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( surface == rhs.surface )
+ && ( minImageCount == rhs.minImageCount )
+ && ( imageFormat == rhs.imageFormat )
+ && ( imageColorSpace == rhs.imageColorSpace )
+ && ( imageExtent == rhs.imageExtent )
+ && ( imageArrayLayers == rhs.imageArrayLayers )
+ && ( imageUsage == rhs.imageUsage )
+ && ( imageSharingMode == rhs.imageSharingMode )
+ && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+ && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+ && ( preTransform == rhs.preTransform )
+ && ( compositeAlpha == rhs.compositeAlpha )
+ && ( presentMode == rhs.presentMode )
+ && ( clipped == rhs.clipped )
+ && ( oldSwapchain == rhs.oldSwapchain );
+ }
+
+ bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+
+ public:
+ const void* pNext = nullptr;
+ SwapchainCreateFlagsKHR flags;
+ SurfaceKHR surface;
+ uint32_t minImageCount;
+ Format imageFormat;
+ ColorSpaceKHR imageColorSpace;
+ Extent2D imageExtent;
+ uint32_t imageArrayLayers;
+ ImageUsageFlags imageUsage;
+ SharingMode imageSharingMode;
+ uint32_t queueFamilyIndexCount;
+ const uint32_t* pQueueFamilyIndices;
+ SurfaceTransformFlagBitsKHR preTransform;
+ CompositeAlphaFlagBitsKHR compositeAlpha;
+ PresentModeKHR presentMode;
+ Bool32 clipped;
+ SwapchainKHR oldSwapchain;
+ };
+ static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
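+
+ // Illustrative comment: the setters above return *this, so a swapchain create
+ // info can be built fluently. A hedged sketch, assuming `surface`, `format`
+ // (a SurfaceFormatKHR), `extent` and `device` come from the usual surface-query
+ // code and the enhanced-mode createSwapchainKHR wrapper is used:
+ //
+ //   vk::SwapchainCreateInfoKHR createInfo;
+ //   createInfo.setSurface( surface )
+ //             .setMinImageCount( 3 )
+ //             .setImageFormat( format.format )
+ //             .setImageColorSpace( format.colorSpace )
+ //             .setImageExtent( extent )
+ //             .setImageArrayLayers( 1 )
+ //             .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
+ //             .setPresentMode( vk::PresentModeKHR::eFifo )
+ //             .setClipped( VK_TRUE );
+ //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( createInfo );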
+
+ enum class ViewportCoordinateSwizzleNV
+ {
+ ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+ eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+ ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+ eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+ ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+ eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+ ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+ eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+ };
+
+ struct ViewportSwizzleNV
+ {
+ ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX )
+ : x( x_ )
+ , y( y_ )
+ , z( z_ )
+ , w( w_ )
+ {
+ }
+
+ ViewportSwizzleNV( VkViewportSwizzleNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
+ }
+
+ ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
+ return *this;
+ }
+ ViewportSwizzleNV& setX( ViewportCoordinateSwizzleNV x_ )
+ {
+ x = x_;
+ return *this;
+ }
+
+ ViewportSwizzleNV& setY( ViewportCoordinateSwizzleNV y_ )
+ {
+ y = y_;
+ return *this;
+ }
+
+ ViewportSwizzleNV& setZ( ViewportCoordinateSwizzleNV z_ )
+ {
+ z = z_;
+ return *this;
+ }
+
+ ViewportSwizzleNV& setW( ViewportCoordinateSwizzleNV w_ )
+ {
+ w = w_;
+ return *this;
+ }
+
+ operator const VkViewportSwizzleNV&() const
+ {
+ return *reinterpret_cast<const VkViewportSwizzleNV*>(this);
+ }
+
+ bool operator==( ViewportSwizzleNV const& rhs ) const
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( z == rhs.z )
+ && ( w == rhs.w );
+ }
+
+ bool operator!=( ViewportSwizzleNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ ViewportCoordinateSwizzleNV x;
+ ViewportCoordinateSwizzleNV y;
+ ViewportCoordinateSwizzleNV z;
+ ViewportCoordinateSwizzleNV w;
+ };
+ static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+
+ struct PipelineViewportSwizzleStateCreateInfoNV
+ {
+ PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_ = PipelineViewportSwizzleStateCreateFlagsNV(), uint32_t viewportCount_ = 0, const ViewportSwizzleNV* pViewportSwizzles_ = nullptr )
+ : flags( flags_ )
+ , viewportCount( viewportCount_ )
+ , pViewportSwizzles( pViewportSwizzles_ )
+ {
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+ return *this;
+ }
+ PipelineViewportSwizzleStateCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV& setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+ {
+ viewportCount = viewportCount_;
+ return *this;
+ }
+
+ PipelineViewportSwizzleStateCreateInfoNV& setPViewportSwizzles( const ViewportSwizzleNV* pViewportSwizzles_ )
+ {
+ pViewportSwizzles = pViewportSwizzles_;
+ return *this;
+ }
+
+ operator const VkPipelineViewportSwizzleStateCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>(this);
+ }
+
+ bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( viewportCount == rhs.viewportCount )
+ && ( pViewportSwizzles == rhs.pViewportSwizzles );
+ }
+
+ bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineViewportSwizzleStateCreateFlagsNV flags;
+ uint32_t viewportCount;
+ const ViewportSwizzleNV* pViewportSwizzles;
+ };
+ static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+ enum class DiscardRectangleModeEXT
+ {
+ eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+ eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+ };
+
+ struct PipelineDiscardRectangleStateCreateInfoEXT
+ {
+ PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = PipelineDiscardRectangleStateCreateFlagsEXT(), DiscardRectangleModeEXT discardRectangleMode_ = DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = 0, const Rect2D* pDiscardRectangles_ = nullptr )
+ : flags( flags_ )
+ , discardRectangleMode( discardRectangleMode_ )
+ , discardRectangleCount( discardRectangleCount_ )
+ , pDiscardRectangles( pDiscardRectangles_ )
+ {
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+ return *this;
+ }
+ PipelineDiscardRectangleStateCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT& setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ )
+ {
+ discardRectangleMode = discardRectangleMode_;
+ return *this;
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleCount( uint32_t discardRectangleCount_ )
+ {
+ discardRectangleCount = discardRectangleCount_;
+ return *this;
+ }
+
+ PipelineDiscardRectangleStateCreateInfoEXT& setPDiscardRectangles( const Rect2D* pDiscardRectangles_ )
+ {
+ pDiscardRectangles = pDiscardRectangles_;
+ return *this;
+ }
+
+ operator const VkPipelineDiscardRectangleStateCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>(this);
+ }
+
+ bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( discardRectangleMode == rhs.discardRectangleMode )
+ && ( discardRectangleCount == rhs.discardRectangleCount )
+ && ( pDiscardRectangles == rhs.pDiscardRectangles );
+ }
+
+ bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineDiscardRectangleStateCreateFlagsEXT flags;
+ DiscardRectangleModeEXT discardRectangleMode;
+ uint32_t discardRectangleCount;
+ const Rect2D* pDiscardRectangles;
+ };
+ static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+
+ enum class SubpassDescriptionFlagBits
+ {
+ ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+ ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+ };
+
+ using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
+
+ VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
+ {
+ return SubpassDescriptionFlags( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits )
+ {
+ return ~( SubpassDescriptionFlags( bits ) );
+ }
+
+ template <> struct FlagTraits<SubpassDescriptionFlagBits>
+ {
+ enum
+ {
+ allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX)
+ };
+ };
+
+ struct SubpassDescription
+ {
+ SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr )
+ : flags( flags_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , inputAttachmentCount( inputAttachmentCount_ )
+ , pInputAttachments( pInputAttachments_ )
+ , colorAttachmentCount( colorAttachmentCount_ )
+ , pColorAttachments( pColorAttachments_ )
+ , pResolveAttachments( pResolveAttachments_ )
+ , pDepthStencilAttachment( pDepthStencilAttachment_ )
+ , preserveAttachmentCount( preserveAttachmentCount_ )
+ , pPreserveAttachments( pPreserveAttachments_ )
+ {
+ }
+
+ SubpassDescription( VkSubpassDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassDescription ) );
+ }
+
+ SubpassDescription& operator=( VkSubpassDescription const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubpassDescription ) );
+ return *this;
+ }
+ SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
+ {
+ inputAttachmentCount = inputAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
+ {
+ pInputAttachments = pInputAttachments_;
+ return *this;
+ }
+
+ SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
+ {
+ colorAttachmentCount = colorAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
+ {
+ pColorAttachments = pColorAttachments_;
+ return *this;
+ }
+
+ SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
+ {
+ pResolveAttachments = pResolveAttachments_;
+ return *this;
+ }
+
+ SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
+ {
+ pDepthStencilAttachment = pDepthStencilAttachment_;
+ return *this;
+ }
+
+ SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
+ {
+ preserveAttachmentCount = preserveAttachmentCount_;
+ return *this;
+ }
+
+ SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
+ {
+ pPreserveAttachments = pPreserveAttachments_;
+ return *this;
+ }
+
+ operator const VkSubpassDescription&() const
+ {
+ return *reinterpret_cast<const VkSubpassDescription*>(this);
+ }
+
+ bool operator==( SubpassDescription const& rhs ) const
+ {
+ return ( flags == rhs.flags )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( inputAttachmentCount == rhs.inputAttachmentCount )
+ && ( pInputAttachments == rhs.pInputAttachments )
+ && ( colorAttachmentCount == rhs.colorAttachmentCount )
+ && ( pColorAttachments == rhs.pColorAttachments )
+ && ( pResolveAttachments == rhs.pResolveAttachments )
+ && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+ && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+ && ( pPreserveAttachments == rhs.pPreserveAttachments );
+ }
+
+ bool operator!=( SubpassDescription const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ SubpassDescriptionFlags flags;
+ PipelineBindPoint pipelineBindPoint;
+ uint32_t inputAttachmentCount;
+ const AttachmentReference* pInputAttachments;
+ uint32_t colorAttachmentCount;
+ const AttachmentReference* pColorAttachments;
+ const AttachmentReference* pResolveAttachments;
+ const AttachmentReference* pDepthStencilAttachment;
+ uint32_t preserveAttachmentCount;
+ const uint32_t* pPreserveAttachments;
+ };
+ static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
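+
+ // Illustrative comment: a typical single-subpass graphics configuration only
+ // needs a colour attachment reference. Hedged sketch:
+ //
+ //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
+ //   vk::SubpassDescription subpass;
+ //   subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
+ //          .setColorAttachmentCount( 1 )
+ //          .setPColorAttachments( &colorRef );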
+
+ struct RenderPassCreateInfo
+ {
+ RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
+ : flags( flags_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , subpassCount( subpassCount_ )
+ , pSubpasses( pSubpasses_ )
+ , dependencyCount( dependencyCount_ )
+ , pDependencies( pDependencies_ )
+ {
+ }
+
+ RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
+ }
+
+ RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
+ return *this;
+ }
+ RenderPassCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+ {
+ attachmentCount = attachmentCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
+ {
+ pAttachments = pAttachments_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
+ {
+ subpassCount = subpassCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
+ {
+ pSubpasses = pSubpasses_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
+ {
+ dependencyCount = dependencyCount_;
+ return *this;
+ }
+
+ RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
+ {
+ pDependencies = pDependencies_;
+ return *this;
+ }
+
+ operator const VkRenderPassCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
+ }
+
+ bool operator==( RenderPassCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( attachmentCount == rhs.attachmentCount )
+ && ( pAttachments == rhs.pAttachments )
+ && ( subpassCount == rhs.subpassCount )
+ && ( pSubpasses == rhs.pSubpasses )
+ && ( dependencyCount == rhs.dependencyCount )
+ && ( pDependencies == rhs.pDependencies );
+ }
+
+ bool operator!=( RenderPassCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eRenderPassCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ RenderPassCreateFlags flags;
+ uint32_t attachmentCount;
+ const AttachmentDescription* pAttachments;
+ uint32_t subpassCount;
+ const SubpassDescription* pSubpasses;
+ uint32_t dependencyCount;
+ const SubpassDependency* pDependencies;
+ };
+ static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
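+
+ // Illustrative comment: combining the structures above, a minimal render pass
+ // can be created through the Device wrapper declared later in this header.
+ // Hedged sketch, reusing the `subpass` from the previous comment and an
+ // AttachmentDescription `colorDesc` assumed to exist in application code:
+ //
+ //   vk::RenderPassCreateInfo rpInfo;
+ //   rpInfo.setAttachmentCount( 1 )
+ //         .setPAttachments( &colorDesc )
+ //         .setSubpassCount( 1 )
+ //         .setPSubpasses( &subpass );
+ //   vk::RenderPass renderPass = device.createRenderPass( rpInfo );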
+
+ enum class PointClippingBehavior
+ {
+ eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+ eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+ eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+ eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+ };
+
+ struct PhysicalDevicePointClippingProperties
+ {
+ operator const VkPhysicalDevicePointClippingProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>(this);
+ }
+
+ bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pointClippingBehavior == rhs.pointClippingBehavior );
+ }
+
+ bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+
+ public:
+ void* pNext = nullptr;
+ PointClippingBehavior pointClippingBehavior;
+ };
+ static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+
+ using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+
+ enum class SamplerReductionModeEXT
+ {
+ eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+ eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
+ eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
+ };
+
+ struct SamplerReductionModeCreateInfoEXT
+ {
+ SamplerReductionModeCreateInfoEXT( SamplerReductionModeEXT reductionMode_ = SamplerReductionModeEXT::eWeightedAverage )
+ : reductionMode( reductionMode_ )
+ {
+ }
+
+ SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
+ }
+
+ SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
+ return *this;
+ }
+ SamplerReductionModeCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SamplerReductionModeCreateInfoEXT& setReductionMode( SamplerReductionModeEXT reductionMode_ )
+ {
+ reductionMode = reductionMode_;
+ return *this;
+ }
+
+ operator const VkSamplerReductionModeCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>(this);
+ }
+
+ bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( reductionMode == rhs.reductionMode );
+ }
+
+ bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ SamplerReductionModeEXT reductionMode;
+ };
+ static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
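+
+ // Illustrative comment: SamplerReductionModeCreateInfoEXT is chained into the
+ // pNext of a SamplerCreateInfo when VK_EXT_sampler_filter_minmax is enabled,
+ // e.g. to build a min-reduction sampler. Hedged sketch:
+ //
+ //   vk::SamplerReductionModeCreateInfoEXT reduction( vk::SamplerReductionModeEXT::eMin );
+ //   vk::SamplerCreateInfo samplerInfo;
+ //   samplerInfo.setPNext( &reduction )
+ //              .setMagFilter( vk::Filter::eLinear )
+ //              .setMinFilter( vk::Filter::eLinear );
+ //   vk::Sampler sampler = device.createSampler( samplerInfo );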
+
+ enum class TessellationDomainOrigin
+ {
+ eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+ eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+ eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+ eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
+ };
+
+ struct PipelineTessellationDomainOriginStateCreateInfo
+ {
+ PipelineTessellationDomainOriginStateCreateInfo( TessellationDomainOrigin domainOrigin_ = TessellationDomainOrigin::eUpperLeft )
+ : domainOrigin( domainOrigin_ )
+ {
+ }
+
+ PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+ }
+
+ PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+ return *this;
+ }
+ PipelineTessellationDomainOriginStateCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineTessellationDomainOriginStateCreateInfo& setDomainOrigin( TessellationDomainOrigin domainOrigin_ )
+ {
+ domainOrigin = domainOrigin_;
+ return *this;
+ }
+
+ operator const VkPipelineTessellationDomainOriginStateCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>(this);
+ }
+
+ bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( domainOrigin == rhs.domainOrigin );
+ }
+
+ bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ TessellationDomainOrigin domainOrigin;
+ };
+ static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+
+ using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+ enum class SamplerYcbcrModelConversion
+ {
+ eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+ eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+ eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+ eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+ eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+ eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+ eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+ eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+ eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+ eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+ };
+
+ enum class SamplerYcbcrRange
+ {
+ eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+ eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+ eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+ eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
+ };
+
+ enum class ChromaLocation
+ {
+ eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
+ eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN,
+ eMidpoint = VK_CHROMA_LOCATION_MIDPOINT,
+ eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT
+ };
+
+ struct SamplerYcbcrConversionCreateInfo
+ {
+ SamplerYcbcrConversionCreateInfo( Format format_ = Format::eUndefined, SamplerYcbcrModelConversion ycbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange ycbcrRange_ = SamplerYcbcrRange::eItuFull, ComponentMapping components_ = ComponentMapping(), ChromaLocation xChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation yChromaOffset_ = ChromaLocation::eCositedEven, Filter chromaFilter_ = Filter::eNearest, Bool32 forceExplicitReconstruction_ = 0 )
+ : format( format_ )
+ , ycbcrModel( ycbcrModel_ )
+ , ycbcrRange( ycbcrRange_ )
+ , components( components_ )
+ , xChromaOffset( xChromaOffset_ )
+ , yChromaOffset( yChromaOffset_ )
+ , chromaFilter( chromaFilter_ )
+ , forceExplicitReconstruction( forceExplicitReconstruction_ )
+ {
+ }
+
+ SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
+ }
+
+ SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
+ return *this;
+ }
+ SamplerYcbcrConversionCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setFormat( Format format_ )
+ {
+ format = format_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setYcbcrModel( SamplerYcbcrModelConversion ycbcrModel_ )
+ {
+ ycbcrModel = ycbcrModel_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setYcbcrRange( SamplerYcbcrRange ycbcrRange_ )
+ {
+ ycbcrRange = ycbcrRange_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setComponents( ComponentMapping components_ )
+ {
+ components = components_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setXChromaOffset( ChromaLocation xChromaOffset_ )
+ {
+ xChromaOffset = xChromaOffset_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setYChromaOffset( ChromaLocation yChromaOffset_ )
+ {
+ yChromaOffset = yChromaOffset_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setChromaFilter( Filter chromaFilter_ )
+ {
+ chromaFilter = chromaFilter_;
+ return *this;
+ }
+
+ SamplerYcbcrConversionCreateInfo& setForceExplicitReconstruction( Bool32 forceExplicitReconstruction_ )
+ {
+ forceExplicitReconstruction = forceExplicitReconstruction_;
+ return *this;
+ }
+
+ operator const VkSamplerYcbcrConversionCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>(this);
+ }
+
+ bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( format == rhs.format )
+ && ( ycbcrModel == rhs.ycbcrModel )
+ && ( ycbcrRange == rhs.ycbcrRange )
+ && ( components == rhs.components )
+ && ( xChromaOffset == rhs.xChromaOffset )
+ && ( yChromaOffset == rhs.yChromaOffset )
+ && ( chromaFilter == rhs.chromaFilter )
+ && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+ }
+
+ bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ Format format;
+ SamplerYcbcrModelConversion ycbcrModel;
+ SamplerYcbcrRange ycbcrRange;
+ ComponentMapping components;
+ ChromaLocation xChromaOffset;
+ ChromaLocation yChromaOffset;
+ Filter chromaFilter;
+ Bool32 forceExplicitReconstruction;
+ };
+ static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+
+ using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
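+
+  // A minimal usage sketch (assuming the default vk namespace and a multi-planar format such as
+  // eG8B8R83Plane420Unorm): the chained setters populate the wrapper before it is passed to
+  // Device::createSamplerYcbcrConversion.
+  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo = vk::SamplerYcbcrConversionCreateInfo()
+  //     .setFormat( vk::Format::eG8B8R83Plane420Unorm )
+  //     .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
+  //     .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
+  //     .setChromaFilter( vk::Filter::eLinear );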
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ struct AndroidHardwareBufferFormatPropertiesANDROID
+ {
+ operator const VkAndroidHardwareBufferFormatPropertiesANDROID&() const
+ {
+ return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>(this);
+ }
+
+ bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( format == rhs.format )
+ && ( externalFormat == rhs.externalFormat )
+ && ( formatFeatures == rhs.formatFeatures )
+ && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+ && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+ && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+ && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+ && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+ }
+
+ bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+
+ public:
+ void* pNext = nullptr;
+ Format format;
+ uint64_t externalFormat;
+ FormatFeatureFlags formatFeatures;
+ ComponentMapping samplerYcbcrConversionComponents;
+ SamplerYcbcrModelConversion suggestedYcbcrModel;
+ SamplerYcbcrRange suggestedYcbcrRange;
+ ChromaLocation suggestedXChromaOffset;
+ ChromaLocation suggestedYChromaOffset;
+ };
+ static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ enum class BlendOverlapEXT
+ {
+ eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+ eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+ eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+ };
+
+ struct PipelineColorBlendAdvancedStateCreateInfoEXT
+ {
+ PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32 srcPremultiplied_ = 0, Bool32 dstPremultiplied_ = 0, BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated )
+ : srcPremultiplied( srcPremultiplied_ )
+ , dstPremultiplied( dstPremultiplied_ )
+ , blendOverlap( blendOverlap_ )
+ {
+ }
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+ }
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+ return *this;
+ }
+ PipelineColorBlendAdvancedStateCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT& setSrcPremultiplied( Bool32 srcPremultiplied_ )
+ {
+ srcPremultiplied = srcPremultiplied_;
+ return *this;
+ }
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT& setDstPremultiplied( Bool32 dstPremultiplied_ )
+ {
+ dstPremultiplied = dstPremultiplied_;
+ return *this;
+ }
+
+ PipelineColorBlendAdvancedStateCreateInfoEXT& setBlendOverlap( BlendOverlapEXT blendOverlap_ )
+ {
+ blendOverlap = blendOverlap_;
+ return *this;
+ }
+
+ operator const VkPipelineColorBlendAdvancedStateCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this);
+ }
+
+ bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcPremultiplied == rhs.srcPremultiplied )
+ && ( dstPremultiplied == rhs.dstPremultiplied )
+ && ( blendOverlap == rhs.blendOverlap );
+ }
+
+ bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ Bool32 srcPremultiplied;
+ Bool32 dstPremultiplied;
+ BlendOverlapEXT blendOverlap;
+ };
+ static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+
+ enum class CoverageModulationModeNV
+ {
+ eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+ eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+ eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+ eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+ };
+
+ struct PipelineCoverageModulationStateCreateInfoNV
+ {
+ PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_ = PipelineCoverageModulationStateCreateFlagsNV(), CoverageModulationModeNV coverageModulationMode_ = CoverageModulationModeNV::eNone, Bool32 coverageModulationTableEnable_ = 0, uint32_t coverageModulationTableCount_ = 0, const float* pCoverageModulationTable_ = nullptr )
+ : flags( flags_ )
+ , coverageModulationMode( coverageModulationMode_ )
+ , coverageModulationTableEnable( coverageModulationTableEnable_ )
+ , coverageModulationTableCount( coverageModulationTableCount_ )
+ , pCoverageModulationTable( pCoverageModulationTable_ )
+ {
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+ return *this;
+ }
+ PipelineCoverageModulationStateCreateInfoNV& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ )
+ {
+ coverageModulationMode = coverageModulationMode_;
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ )
+ {
+ coverageModulationTableEnable = coverageModulationTableEnable_;
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ )
+ {
+ coverageModulationTableCount = coverageModulationTableCount_;
+ return *this;
+ }
+
+ PipelineCoverageModulationStateCreateInfoNV& setPCoverageModulationTable( const float* pCoverageModulationTable_ )
+ {
+ pCoverageModulationTable = pCoverageModulationTable_;
+ return *this;
+ }
+
+ operator const VkPipelineCoverageModulationStateCreateInfoNV&() const
+ {
+ return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>(this);
+ }
+
+ bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( coverageModulationMode == rhs.coverageModulationMode )
+ && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
+ && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
+ && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+ }
+
+ bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineCoverageModulationStateCreateFlagsNV flags;
+ CoverageModulationModeNV coverageModulationMode;
+ Bool32 coverageModulationTableEnable;
+ uint32_t coverageModulationTableCount;
+ const float* pCoverageModulationTable;
+ };
+ static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+ enum class ValidationCacheHeaderVersionEXT
+ {
+ eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+ };
+
+ enum class ShaderInfoTypeAMD
+ {
+ eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+ eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+ eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+ };
+
+ enum class QueueGlobalPriorityEXT
+ {
+ eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+ eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
+ eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
+ eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
+ };
+
+ struct DeviceQueueGlobalPriorityCreateInfoEXT
+ {
+ DeviceQueueGlobalPriorityCreateInfoEXT( QueueGlobalPriorityEXT globalPriority_ = QueueGlobalPriorityEXT::eLow )
+ : globalPriority( globalPriority_ )
+ {
+ }
+
+ DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
+ }
+
+ DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
+ return *this;
+ }
+ DeviceQueueGlobalPriorityCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceQueueGlobalPriorityCreateInfoEXT& setGlobalPriority( QueueGlobalPriorityEXT globalPriority_ )
+ {
+ globalPriority = globalPriority_;
+ return *this;
+ }
+
+ operator const VkDeviceQueueGlobalPriorityCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>(this);
+ }
+
+ bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( globalPriority == rhs.globalPriority );
+ }
+
+ bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ QueueGlobalPriorityEXT globalPriority;
+ };
+ static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
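+
+  // A minimal usage sketch (assuming the default vk namespace; queueFamilyIndex and queuePriority
+  // are application-provided values): the struct is chained into DeviceQueueCreateInfo::pNext.
+  //   vk::DeviceQueueGlobalPriorityCreateInfoEXT globalPriority( vk::QueueGlobalPriorityEXT::eHigh );
+  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
+  //     .setQueueFamilyIndex( queueFamilyIndex )
+  //     .setQueueCount( 1 )
+  //     .setPQueuePriorities( &queuePriority )
+  //     .setPNext( &globalPriority );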
+
+ enum class DebugUtilsMessageSeverityFlagBitsEXT
+ {
+ eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+ eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+ eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+ eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+ };
+
+ using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
+
+ VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 )
+ {
+ return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits )
+ {
+ return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
+ }
+
+ template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+ {
+ enum
+ {
+ allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
+ };
+ };
+
+ enum class DebugUtilsMessageTypeFlagBitsEXT
+ {
+ eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+ eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+ };
+
+ using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
+
+ VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 )
+ {
+ return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits )
+ {
+ return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
+ }
+
+ template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+ {
+ enum
+ {
+ allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
+ };
+ };
+
+ struct DebugUtilsMessengerCreateInfoEXT
+ {
+ DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_ = DebugUtilsMessengerCreateFlagsEXT(), DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = DebugUtilsMessageSeverityFlagsEXT(), DebugUtilsMessageTypeFlagsEXT messageType_ = DebugUtilsMessageTypeFlagsEXT(), PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr, void* pUserData_ = nullptr )
+ : flags( flags_ )
+ , messageSeverity( messageSeverity_ )
+ , messageType( messageType_ )
+ , pfnUserCallback( pfnUserCallback_ )
+ , pUserData( pUserData_ )
+ {
+ }
+
+ DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
+ return *this;
+ }
+ DebugUtilsMessengerCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ )
+ {
+ messageSeverity = messageSeverity_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ )
+ {
+ messageType = messageType_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ )
+ {
+ pfnUserCallback = pfnUserCallback_;
+ return *this;
+ }
+
+ DebugUtilsMessengerCreateInfoEXT& setPUserData( void* pUserData_ )
+ {
+ pUserData = pUserData_;
+ return *this;
+ }
+
+ operator const VkDebugUtilsMessengerCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>(this);
+ }
+
+ bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( messageSeverity == rhs.messageSeverity )
+ && ( messageType == rhs.messageType )
+ && ( pfnUserCallback == rhs.pfnUserCallback )
+ && ( pUserData == rhs.pUserData );
+ }
+
+ bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ DebugUtilsMessengerCreateFlagsEXT flags;
+ DebugUtilsMessageSeverityFlagsEXT messageSeverity;
+ DebugUtilsMessageTypeFlagsEXT messageType;
+ PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
+ void* pUserData;
+ };
+ static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
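+
+  // A minimal usage sketch (assuming the default vk namespace and an application-provided
+  // PFN_vkDebugUtilsMessengerCallbackEXT named debugCallback): the operator| overloads above
+  // combine the severity and type bits into the corresponding flag masks.
+  //   vk::DebugUtilsMessengerCreateInfoEXT messengerInfo = vk::DebugUtilsMessengerCreateInfoEXT()
+  //     .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
+  //     .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
+  //     .setPfnUserCallback( debugCallback );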
+
+ enum class ConservativeRasterizationModeEXT
+ {
+ eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+ eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+ eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+ };
+
+ struct PipelineRasterizationConservativeStateCreateInfoEXT
+ {
+ PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = PipelineRasterizationConservativeStateCreateFlagsEXT(), ConservativeRasterizationModeEXT conservativeRasterizationMode_ = ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = 0 )
+ : flags( flags_ )
+ , conservativeRasterizationMode( conservativeRasterizationMode_ )
+ , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
+ {
+ }
+
+ PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
+ }
+
+ PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
+ return *this;
+ }
+ PipelineRasterizationConservativeStateCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineRasterizationConservativeStateCreateInfoEXT& setFlags( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ )
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineRasterizationConservativeStateCreateInfoEXT& setConservativeRasterizationMode( ConservativeRasterizationModeEXT conservativeRasterizationMode_ )
+ {
+ conservativeRasterizationMode = conservativeRasterizationMode_;
+ return *this;
+ }
+
+ PipelineRasterizationConservativeStateCreateInfoEXT& setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ )
+ {
+ extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
+ return *this;
+ }
+
+ operator const VkPipelineRasterizationConservativeStateCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>(this);
+ }
+
+ bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
+ && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
+ }
+
+ bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ PipelineRasterizationConservativeStateCreateFlagsEXT flags;
+ ConservativeRasterizationModeEXT conservativeRasterizationMode;
+ float extraPrimitiveOverestimationSize;
+ };
+ static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+
+ enum class DescriptorBindingFlagBitsEXT
+ {
+ eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+ eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
+ ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
+ eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
+ };
+
+ using DescriptorBindingFlagsEXT = Flags<DescriptorBindingFlagBitsEXT, VkDescriptorBindingFlagsEXT>;
+
+ VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 )
+ {
+ return DescriptorBindingFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits )
+ {
+ return ~( DescriptorBindingFlagsEXT( bits ) );
+ }
+
+ template <> struct FlagTraits<DescriptorBindingFlagBitsEXT>
+ {
+ enum
+ {
+ allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount)
+ };
+ };
+
+ struct DescriptorSetLayoutBindingFlagsCreateInfoEXT
+ {
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0, const DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr )
+ : bindingCount( bindingCount_ )
+ , pBindingFlags( pBindingFlags_ )
+ {
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) );
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) );
+ return *this;
+ }
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT& setBindingCount( uint32_t bindingCount_ )
+ {
+ bindingCount = bindingCount_;
+ return *this;
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPBindingFlags( const DescriptorBindingFlagsEXT* pBindingFlags_ )
+ {
+ pBindingFlags = pBindingFlags_;
+ return *this;
+ }
+
+ operator const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT&() const
+ {
+ return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>(this);
+ }
+
+ bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( bindingCount == rhs.bindingCount )
+ && ( pBindingFlags == rhs.pBindingFlags );
+ }
+
+ bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t bindingCount;
+ const DescriptorBindingFlagsEXT* pBindingFlags;
+ };
+ static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" );
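+
+  // A minimal usage sketch (assuming the default vk namespace and an application-provided
+  // vk::DescriptorSetLayoutCreateInfo named layoutInfo describing two bindings): one flag entry
+  // per binding, chained through pNext.
+  //   std::array<vk::DescriptorBindingFlagsEXT, 2> bindingFlags =
+  //     { vk::DescriptorBindingFlagsEXT(), vk::DescriptorBindingFlagBitsEXT::ePartiallyBound };
+  //   vk::DescriptorSetLayoutBindingFlagsCreateInfoEXT bindingFlagsInfo( 2, bindingFlags.data() );
+  //   layoutInfo.setPNext( &bindingFlagsInfo );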
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = Dispatch() );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = Dispatch() );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d)
+ {
+ return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
+ {
+ uint32_t apiVersion;
+ Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
+ return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
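+
+  // A minimal usage sketch (enhanced mode, exceptions enabled): the returned value packs the
+  // instance-level API version and can be decoded with the VK_VERSION_* macros.
+  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
+  //   uint32_t major = VK_VERSION_MAJOR( apiVersion );
+  //   uint32_t minor = VK_VERSION_MINOR( apiVersion );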
+
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = Dispatch() );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d)
+ {
+ return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
+ {
+ std::vector<LayerProperties,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = Dispatch() );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d)
+ {
+ return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
+ {
+ std::vector<ExtensionProperties,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
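+
+  // A minimal usage sketch (enhanced mode, exceptions enabled): the overload taking an Optional
+  // layer name hides the count/fill double call and returns the properties as a std::vector.
+  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
+  //   // each entry exposes extensionName and specVersion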
+
+
+ // forward declarations
+ struct CmdProcessCommandsInfoNVX;
+
+ class CommandBuffer
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR CommandBuffer()
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t )
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer )
+ : m_commandBuffer( commandBuffer )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandBuffer & operator=(VkCommandBuffer commandBuffer)
+ {
+ m_commandBuffer = commandBuffer;
+ return *this;
+ }
+#endif
+
+ CommandBuffer & operator=( std::nullptr_t )
+ {
+ m_commandBuffer = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( CommandBuffer const & rhs ) const
+ {
+ return m_commandBuffer == rhs.m_commandBuffer;
+ }
+
+ bool operator!=(CommandBuffer const & rhs ) const
+ {
+ return m_commandBuffer != rhs.m_commandBuffer;
+ }
+
+ bool operator<(CommandBuffer const & rhs ) const
+ {
+ return m_commandBuffer < rhs.m_commandBuffer;
+ }
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result end(Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type end(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setLineWidth( float lineWidth, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setBlendConstants( const float blendConstants[4], Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = DispatchLoaderStatic>
+ void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = DispatchLoaderStatic>
+ void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void nextSubpass( SubpassContents contents, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void endRenderPass(Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugMarkerEndEXT(Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDeviceMask( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d = Dispatch() ) const;
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const
+ {
+ return m_commandBuffer;
+ }
+
+ explicit operator bool() const
+ {
+ return m_commandBuffer != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_commandBuffer == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCommandBuffer m_commandBuffer;
+ };
+
+ static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d) const
+ {
+ d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d ) const
+ {
+ d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d) const
+ {
+ d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports, Dispatch const &d ) const
+ {
+ d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d) const
+ {
+ d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors, Dispatch const &d ) const
+ {
+ d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const
+ {
+ d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const
+ {
+ d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const
+ {
+ d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const
+ {
+ d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const
+ {
+ d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const
+ {
+ d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const
+ {
+ d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const
+ {
+ d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const
+ {
+ d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const
+ {
+ d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const
+ {
+ d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const
+ {
+ d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const
+ {
+ d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const
+ {
+ d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const
+ {
+ d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const
+ {
+ d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d) const
+ {
+ d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d ) const
+ {
+ d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d) const
+ {
+ d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+ if ( buffers.size() != offsets.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const
+ {
+ d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const
+ {
+ d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const
+ {
+ d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const
+ {
+ d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const
+ {
+ d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const
+ {
+ d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const
+ {
+ d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const
+ {
+ d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
+ {
+ d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
+ {
+ d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d) const
+ {
+ d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d ) const
+ {
+ d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d) const
+ {
+ d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions, Dispatch const &d ) const
+ {
+ d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d) const
+ {
+ d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions, Dispatch const &d ) const
+ {
+ d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d) const
+ {
+ d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter, Dispatch const &d ) const
+ {
+ d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const
+ {
+ d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d ) const
+ {
+ d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const
+ {
+ d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d ) const
+ {
+ d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d) const
+ {
+ d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const
+ {
+ d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d) const
+ {
+ d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d ) const
+ {
+ d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const
+ {
+ d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d ) const
+ {
+ d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const
+ {
+ d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d ) const
+ {
+ d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d) const
+ {
+ d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects, Dispatch const &d ) const
+ {
+ d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d) const
+ {
+ d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions, Dispatch const &d ) const
+ {
+ d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const
+ {
+ d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const
+ {
+ d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const
+ {
+ d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const
+ {
+ d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const
+ {
+ d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const
+ {
+ d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const
+ {
+ d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const
+ {
+ d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d) const
+ {
+ d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d ) const
+ {
+ d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d) const
+ {
+ d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d ) const
+ {
+ d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const
+ {
+ d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const
+ {
+ d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d) const
+ {
+ d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d ) const
+ {
+ d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const
+ {
+ d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const
+ {
+ d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const
+ {
+ d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const
+ {
+ d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d) const
+ {
+ d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d ) const
+ {
+ d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d) const
+ {
+ d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d ) const
+ {
+ d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const
+ {
+ d.vkCmdEndRenderPass( m_commandBuffer );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const
+ {
+ d.vkCmdEndRenderPass( m_commandBuffer );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
+ {
+ d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
+ {
+ d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const
+ {
+ d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const
+ {
+ d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const
+ {
+ d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const
+ {
+ d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const
+ {
+ d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const
+ {
+ d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
+ {
+ d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
+ {
+ d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
+ {
+ d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
+ {
+ d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const
+ {
+ d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const
+ {
+ d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const
+ {
+ d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const
+ {
+ d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const
+ {
+ d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const
+ {
+ d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const
+ {
+ d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const
+ {
+ d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const
+ {
+ d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const
+ {
+ d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
+ {
+ d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
+ {
+ d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
+ {
+ d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
+ {
+ d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const
+ {
+ d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const
+ {
+ d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const
+ {
+ d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const
+ {
+ d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d) const
+ {
+ d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles, Dispatch const &d ) const
+ {
+ d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const
+ {
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const
+ {
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+ {
+ d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+ {
+ d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const
+ {
+ d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const
+ {
+ d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+ {
+ d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+ {
+ d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const
+ {
+ d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), dstOffset, marker );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const
+ {
+ d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), dstOffset, marker );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ struct SubmitInfo
+ {
+ SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , pWaitDstStageMask( pWaitDstStageMask_ )
+ , commandBufferCount( commandBufferCount_ )
+ , pCommandBuffers( pCommandBuffers_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphores( pSignalSemaphores_ )
+ {
+ }
+
+ SubmitInfo( VkSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubmitInfo ) );
+ }
+
+ SubmitInfo& operator=( VkSubmitInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( SubmitInfo ) );
+ return *this;
+ }
+ SubmitInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+ {
+ waitSemaphoreCount = waitSemaphoreCount_;
+ return *this;
+ }
+
+ SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+ {
+ pWaitSemaphores = pWaitSemaphores_;
+ return *this;
+ }
+
+ SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
+ {
+ pWaitDstStageMask = pWaitDstStageMask_;
+ return *this;
+ }
+
+ SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+ {
+ commandBufferCount = commandBufferCount_;
+ return *this;
+ }
+
+ SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
+ {
+ pCommandBuffers = pCommandBuffers_;
+ return *this;
+ }
+
+ SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+ {
+ signalSemaphoreCount = signalSemaphoreCount_;
+ return *this;
+ }
+
+ SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+ {
+ pSignalSemaphores = pSignalSemaphores_;
+ return *this;
+ }
+
+ operator const VkSubmitInfo&() const
+ {
+ return *reinterpret_cast<const VkSubmitInfo*>(this);
+ }
+
+ bool operator==( SubmitInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+ && ( pWaitSemaphores == rhs.pWaitSemaphores )
+ && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+ && ( commandBufferCount == rhs.commandBufferCount )
+ && ( pCommandBuffers == rhs.pCommandBuffers )
+ && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+ && ( pSignalSemaphores == rhs.pSignalSemaphores );
+ }
+
+ bool operator!=( SubmitInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eSubmitInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t waitSemaphoreCount;
+ const Semaphore* pWaitSemaphores;
+ const PipelineStageFlags* pWaitDstStageMask;
+ uint32_t commandBufferCount;
+ const CommandBuffer* pCommandBuffers;
+ uint32_t signalSemaphoreCount;
+ const Semaphore* pSignalSemaphores;
+ };
+ static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+
+ class Queue
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Queue()
+ : m_queue(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t )
+ : m_queue(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue )
+ : m_queue( queue )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Queue & operator=(VkQueue queue)
+ {
+ m_queue = queue;
+ return *this;
+ }
+#endif
+
+ Queue & operator=( std::nullptr_t )
+ {
+ m_queue = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Queue const & rhs ) const
+ {
+ return m_queue == rhs.m_queue;
+ }
+
+ bool operator!=(Queue const & rhs ) const
+ {
+ return m_queue != rhs.m_queue;
+ }
+
+ bool operator<(Queue const & rhs ) const
+ {
+ return m_queue < rhs.m_queue;
+ }
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result waitIdle(Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type waitIdle(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const
+ {
+ return m_queue;
+ }
+
+ explicit operator bool() const
+ {
+ return m_queue != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_queue == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkQueue m_queue;
+ };
+
+ static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
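+ // Usage sketch (hypothetical 'queue' obtained from Device::getQueue, declared further
+ // below): Queue is a value-type wrapper with the same size as the C handle, so it can
+ // be compared, tested for nullness, and converted back for use with C code:
+ //
+ //   if ( queue )                                         // false while VK_NULL_HANDLE
+ //   {
+ //     VkQueue rawQueue = static_cast<VkQueue>( queue );  // hand off to plain C Vulkan
+ //   }
+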
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const SubmitInfo> submits, Fence fence, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
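+ // Usage sketch (hypothetical 'queue', 'submitInfo', 'submitA', 'submitB', 'fence'): in
+ // enhanced mode the ArrayProxy overload above is expected to accept a single SubmitInfo
+ // or an initializer list of them, routing the result through createResultValue rather
+ // than returning the raw code:
+ //
+ //   queue.submit( submitInfo, fence );              // one SubmitInfo as an ArrayProxy
+ //   queue.submit( { submitA, submitB }, nullptr );  // batch submit without a fence
+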
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
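+ // Note: unlike most enhanced-mode calls, presentKHR above still returns a Result, and
+ // eSuboptimalKHR is passed to createResultValue as an accepted success code so callers
+ // can react to it. Hypothetical 'queue' and 'presentInfo', assuming the default 'vk'
+ // namespace:
+ //
+ //   vk::Result r = queue.presentKHR( presentInfo );
+ //   if ( r == vk::Result::eSuboptimalKHR ) { /* swapchain no longer matches surface */ }
+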
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+ {
+ d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+ {
+ d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const
+ {
+ d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const
+ {
+ d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+ {
+ d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+ {
+ d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ class Device;
+
+ template <> class UniqueHandleTraits<Buffer> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueBuffer = UniqueHandle<Buffer>;
+ template <> class UniqueHandleTraits<BufferView> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueBufferView = UniqueHandle<BufferView>;
+ template <> class UniqueHandleTraits<CommandBuffer> {public: using deleter = PoolFree<Device, CommandPool>; };
+ using UniqueCommandBuffer = UniqueHandle<CommandBuffer>;
+ template <> class UniqueHandleTraits<CommandPool> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueCommandPool = UniqueHandle<CommandPool>;
+ template <> class UniqueHandleTraits<DescriptorPool> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueDescriptorPool = UniqueHandle<DescriptorPool>;
+ template <> class UniqueHandleTraits<DescriptorSet> {public: using deleter = PoolFree<Device, DescriptorPool>; };
+ using UniqueDescriptorSet = UniqueHandle<DescriptorSet>;
+ template <> class UniqueHandleTraits<DescriptorSetLayout> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout>;
+ template <> class UniqueHandleTraits<DescriptorUpdateTemplate> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate>;
+ template <> class UniqueHandleTraits<DeviceMemory> {public: using deleter = ObjectFree<Device>; };
+ using UniqueDeviceMemory = UniqueHandle<DeviceMemory>;
+ template <> class UniqueHandleTraits<Event> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueEvent = UniqueHandle<Event>;
+ template <> class UniqueHandleTraits<Fence> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueFence = UniqueHandle<Fence>;
+ template <> class UniqueHandleTraits<Framebuffer> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueFramebuffer = UniqueHandle<Framebuffer>;
+ template <> class UniqueHandleTraits<Image> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueImage = UniqueHandle<Image>;
+ template <> class UniqueHandleTraits<ImageView> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueImageView = UniqueHandle<ImageView>;
+ template <> class UniqueHandleTraits<IndirectCommandsLayoutNVX> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX>;
+ template <> class UniqueHandleTraits<ObjectTableNVX> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX>;
+ template <> class UniqueHandleTraits<Pipeline> {public: using deleter = ObjectDestroy<Device>; };
+ using UniquePipeline = UniqueHandle<Pipeline>;
+ template <> class UniqueHandleTraits<PipelineCache> {public: using deleter = ObjectDestroy<Device>; };
+ using UniquePipelineCache = UniqueHandle<PipelineCache>;
+ template <> class UniqueHandleTraits<PipelineLayout> {public: using deleter = ObjectDestroy<Device>; };
+ using UniquePipelineLayout = UniqueHandle<PipelineLayout>;
+ template <> class UniqueHandleTraits<QueryPool> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueQueryPool = UniqueHandle<QueryPool>;
+ template <> class UniqueHandleTraits<RenderPass> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueRenderPass = UniqueHandle<RenderPass>;
+ template <> class UniqueHandleTraits<Sampler> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueSampler = UniqueHandle<Sampler>;
+ template <> class UniqueHandleTraits<SamplerYcbcrConversion> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion>;
+ template <> class UniqueHandleTraits<Semaphore> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueSemaphore = UniqueHandle<Semaphore>;
+ template <> class UniqueHandleTraits<ShaderModule> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueShaderModule = UniqueHandle<ShaderModule>;
+ template <> class UniqueHandleTraits<SwapchainKHR> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR>;
+ template <> class UniqueHandleTraits<ValidationCacheEXT> {public: using deleter = ObjectDestroy<Device>; };
+ using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
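+ // Usage sketch (hypothetical 'device' and 'bufferCreateInfo', assuming the default 'vk'
+ // namespace): with smart handles enabled, the create...Unique / allocate...Unique
+ // factories declared on Device below return UniqueHandle wrappers whose deleters
+ // (ObjectDestroy, ObjectFree or PoolFree, as specialized above) release the underlying
+ // object when the wrapper goes out of scope:
+ //
+ //   vk::UniqueFence  fence  = device.createFenceUnique( vk::FenceCreateInfo() );
+ //   vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
+ //   // both are destroyed automatically when they leave scope
+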
+ class Device
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Device()
+ : m_device(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Device( std::nullptr_t )
+ : m_device(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device )
+ : m_device( device )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Device & operator=(VkDevice device)
+ {
+ m_device = device;
+ return *this;
+ }
+#endif
+
+ Device & operator=( std::nullptr_t )
+ {
+ m_device = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Device const & rhs ) const
+ {
+ return m_device == rhs.m_device;
+ }
+
+ bool operator!=(Device const & rhs ) const
+ {
+ return m_device != rhs.m_device;
+ }
+
+ bool operator<(Device const & rhs ) const
+ {
+ return m_device < rhs.m_device;
+ }
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result waitIdle(Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type waitIdle(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDeviceMemory>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void free( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void unmapMemory( DeviceMemory memory, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ DeviceSize getMemoryCommitment( DeviceMemory memory, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements getBufferMemoryRequirements( Buffer buffer, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements getImageMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueFence>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getFenceStatus( Fence fence, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSemaphore>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueEvent>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getEventStatus( Event event, Dispatch const &d = Dispatch() ) const;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result setEvent( Event event, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type setEvent( Event event, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result resetEvent( Event event, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type resetEvent( Event event, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueQueryPool>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = DispatchLoaderStatic>
+ Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueBuffer>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueBufferView>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueImage>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueImageView>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueShaderModule>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniquePipelineCache>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<UniquePipeline,Allocator>>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniquePipeline>::type createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<UniquePipeline,Allocator>>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniquePipeline>::type createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniquePipelineLayout>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSampler>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDescriptorSetLayout>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDescriptorPool>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<UniqueDescriptorSet,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type free( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueFramebuffer>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueRenderPass>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Extent2D getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueCommandPool>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<UniqueCommandBuffer,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
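+
+ // Illustrative usage sketch (assumes the default enhanced mode with exceptions enabled;
+ // `device` and `pool` are placeholder handles created elsewhere):
+ //
+ //   vk::CommandBufferAllocateInfo info( pool, vk::CommandBufferLevel::ePrimary, 2 );
+ //   std::vector<vk::UniqueCommandBuffer> buffers = device.allocateCommandBuffersUnique( info );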
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void free( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<UniqueSwapchainKHR,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+ template <typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSwapchainKHR>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSwapchainKHR>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<Image>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
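+
+ // Illustrative sketch: the enhanced overload returns both the Result and the image index,
+ // since values such as eSuboptimalKHR are success codes a caller may want to inspect
+ // (`swapchain` and `imageAvailableSemaphore` are placeholders):
+ //
+ //   vk::ResultValue<uint32_t> acquired =
+ //       device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, vk::Fence() );
+ //   uint32_t imageIndex = acquired.value;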
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueIndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueObjectTableNVX>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Fence>::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<uint64_t>::type getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
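+
+ // Illustrative sketch: ArrayProxy lets the enhanced overloads accept a single structure, an
+ // initializer list, or a std::vector without an explicit count (`image` and `memory` are placeholders):
+ //
+ //   vk::BindImageMemoryInfo bindInfo( image, memory, 0 );
+ //   device.bindImageMemory2( bindInfo );   // corresponds to vkBindImageMemory2( device, 1, &bindInfo )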
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDescriptorUpdateTemplate>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
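+
+ // Illustrative sketch of the StructureChain overload, assuming the Vulkan 1.1 dedicated-allocation
+ // structure is available in this header (`buffer` is a placeholder):
+ //
+ //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
+ //       vk::BufferMemoryRequirementsInfo2( buffer ) );
+ //   vk::MemoryRequirements reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;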
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSamplerYcbcrConversion>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSamplerYcbcrConversion>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueValidationCacheEXT>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> srcCaches, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<StructureChain<T...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const
+ {
+ return m_device;
+ }
+
+ explicit operator bool() const
+ {
+ return m_device != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_device == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDevice m_device;
+ };
+
+ static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
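+
+ // Illustrative note: since the wrapper has the same size as VkDevice (checked above), a vk::Device
+ // can be handed to C interfaces through the conversion operator, e.g.:
+ //
+ //   VkDevice cDevice = static_cast<VkDevice>( device );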
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const
+ {
+ return d.vkGetDeviceProcAddr( m_device, pName );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const
+ {
+ return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d) const
+ {
+ d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const
+ {
+ Queue queue;
+ d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
+ return queue;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DeviceMemory memory;
+ Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+ return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDeviceMemory>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DeviceMemory memory;
+ Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+
+ ObjectFree<Device> deleter( *this, allocator );
+ return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
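+
+ // Usage sketch (illustrative only): given a vk::Device device and assuming the default vk
+ // namespace and the default configuration with exceptions enabled (so ResultValueType<T>::type
+ // is plain T), the enhanced-mode overloads above can be called as follows; the numeric values
+ // are placeholders.
+ //
+ //   vk::MemoryAllocateInfo info( 65536 /* allocationSize */, 0 /* memoryTypeIndex */ );
+ //   vk::DeviceMemory memory = device.allocateMemory( info );             // throws vk::SystemError on failure
+ //   vk::UniqueDeviceMemory owned = device.allocateMemoryUnique( info );  // freed automatically via vkFreeMemory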
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, Dispatch const &d ) const
+ {
+ void* pData;
+ Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
+ return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
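+
+ // Usage sketch (illustrative only): mapping, writing and unmapping host-visible memory through
+ // the wrappers above; srcData and srcSize are placeholders, and the flush is only needed when
+ // the memory type is not HOST_COHERENT.
+ //
+ //   void* p = device.mapMemory( memory, 0, VK_WHOLE_SIZE, vk::MemoryMapFlags() );
+ //   std::memcpy( p, srcData, srcSize );
+ //   device.flushMappedMemoryRanges( vk::MappedMemoryRange( memory, 0, VK_WHOLE_SIZE ) );
+ //   device.unmapMemory( memory );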
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d) const
+ {
+ d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d ) const
+ {
+ d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const
+ {
+ d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory, Dispatch const &d ) const
+ {
+ DeviceSize committedMemoryInBytes;
+ d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
+ return committedMemoryInBytes;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer, Dispatch const &d ) const
+ {
+ MemoryRequirements memoryRequirements;
+ d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image, Dispatch const &d ) const
+ {
+ MemoryRequirements memoryRequirements;
+ d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( Image image, Dispatch const &d ) const
+ {
+ std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
+ return sparseMemoryRequirements;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Fence fence;
+ Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueFence>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Fence fence;
+ Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
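+
+ // Usage sketch (illustrative only, exceptions assumed enabled): creating a fence, optionally in
+ // the signaled state, with the enhanced-mode overloads above.
+ //
+ //   vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
+ //   vk::UniqueFence ownedFence = device.createFenceUnique( vk::FenceCreateInfo() );  // destroyed automatically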
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const Fence> fences, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
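+
+ // Usage sketch (illustrative only): unlike most enhanced-mode wrappers, waitForFences returns the
+ // vk::Result directly because eTimeout is an expected outcome rather than an error; timeoutNs is
+ // a placeholder.
+ //
+ //   vk::Result r = device.waitForFences( fence, VK_TRUE, timeoutNs );
+ //   if ( r == vk::Result::eTimeout ) { /* not signaled within timeoutNs */ }
+ //   device.resetFences( fence );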
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Semaphore semaphore;
+ Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+ return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSemaphore>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Semaphore semaphore;
+ Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Event event;
+ Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+ return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueEvent>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Event event;
+ Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::setEvent( Event event, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( Event event, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::resetEvent( Event event, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( Event event, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ QueryPool queryPool;
+ Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+ return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueQueryPool>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ QueryPool queryPool;
+ Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
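+
+ // Usage sketch (illustrative only): the ArrayProxy overload above writes results into any
+ // contiguous buffer; the element type has to be spelled out because it cannot be deduced from a
+ // std::vector. pool and queryCount are placeholders.
+ //
+ //   std::vector<uint64_t> results( queryCount );
+ //   vk::Result r = device.getQueryPoolResults<uint64_t>( pool, 0, queryCount, results, sizeof( uint64_t ),
+ //                                                        vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );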
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Buffer buffer;
+ Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+ return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueBuffer>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Buffer buffer;
+ Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ BufferView view;
+ Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueBufferView>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ BufferView view;
+ Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Image image;
+ Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+ return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueImage>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Image image;
+ Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d) const
+ {
+ d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d ) const
+ {
+ SubresourceLayout layout;
+ d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
+ return layout;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ImageView view;
+ Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueImageView>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ImageView view;
+ Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ShaderModule shaderModule;
+ Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+ return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueShaderModule>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ShaderModule shaderModule;
+ Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ PipelineCache pipelineCache;
+ Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+ return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniquePipelineCache>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ PipelineCache pipelineCache;
+ Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d ) const
+ {
+ std::vector<uint8_t,Allocator> data;
+ size_t dataSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+ if ( ( result == Result::eSuccess ) && dataSize )
+ {
+ data.resize( dataSize );
+ result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ data.resize( dataSize );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
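+
+ // The loop above is the usual two-call enumeration pattern: query the size, resize the buffer,
+ // then fetch, retrying while the implementation reports eIncomplete (the cache data can change
+ // between the calls). Usage sketch (illustrative only, exceptions assumed enabled):
+ //
+ //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );  // e.g. to persist to disk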
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniquePipeline,Allocator>>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
+ std::vector<UniquePipeline, Allocator> pipelines;
+ pipelines.reserve( createInfos.size() );
+ Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
+ Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ for ( size_t i=0 ; i<createInfos.size() ; i++ )
+ {
+ pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
+ }
+
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniquePipeline>::type Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
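+ // Usage sketch for the single-pipeline convenience overloads above (assuming a valid
+ // vk::Device `device`, a vk::PipelineCache `cache` and a filled-in
+ // vk::GraphicsPipelineCreateInfo `createInfo`; names are illustrative):
+ //   vk::UniquePipeline pipeline = device.createGraphicsPipelineUnique( cache, createInfo );
+ //   // with exceptions enabled the call returns the unique handle directly, and
+ //   // vkDestroyPipeline runs through ObjectDestroy<Device> when `pipeline` leaves scope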
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniquePipeline,Allocator>>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
+ std::vector<UniquePipeline, Allocator> pipelines;
+ pipelines.reserve( createInfos.size() );
+ Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
+ Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ for ( size_t i=0 ; i<createInfos.size() ; i++ )
+ {
+ pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
+ }
+
+ return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniquePipeline>::type Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+ return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniquePipelineLayout>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
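+ // Usage sketch (assuming an existing vk::DescriptorSetLayout `setLayout`; names are
+ // illustrative). With exceptions enabled, ResultValueType<PipelineLayout>::type collapses to
+ // the handle itself:
+ //   vk::PipelineLayoutCreateInfo layoutInfo( {}, 1, &setLayout );
+ //   vk::PipelineLayout layout = device.createPipelineLayout( layoutInfo );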
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Sampler sampler;
+ Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+ return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSampler>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Sampler sampler;
+ Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
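+ // In the corresponding declarations (not repeated in these out-of-class definitions) the
+ // Optional<const AllocationCallbacks> parameter defaults to nullptr, so host allocation
+ // callbacks can simply be omitted, e.g.:
+ //   vk::Sampler sampler = device.createSampler( vk::SamplerCreateInfo() );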
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorSetLayout setLayout;
+ Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+ return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDescriptorSetLayout>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorSetLayout setLayout;
+ Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
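+ // Usage sketch (illustrative names; a single uniform-buffer binding at slot 0 visible to the
+ // vertex stage):
+ //   vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1,
+ //                                           vk::ShaderStageFlagBits::eVertex );
+ //   vk::DescriptorSetLayout layout =
+ //     device.createDescriptorSetLayout( vk::DescriptorSetLayoutCreateInfo( {}, 1, &binding ) );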
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorPool descriptorPool;
+ Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+ return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDescriptorPool>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorPool descriptorPool;
+ Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+ return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueDescriptorSet,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" );
+ std::vector<UniqueDescriptorSet, Allocator> descriptorSets;
+ descriptorSets.reserve( allocateInfo.descriptorSetCount );
+ DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) );
+ Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+
+ PoolFree<Device,DescriptorPool> deleter( *this, allocateInfo.descriptorPool );
+ for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
+ {
+ descriptorSets.push_back( UniqueDescriptorSet( buffer[i], deleter ) );
+ }
+
+ return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
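+ // The Unique variant above returns the sets to their pool through PoolFree<Device,
+ // DescriptorPool> rather than destroying them individually, so the pool must have been
+ // created with vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet for that free call to be
+ // valid. Hedged sketch (assuming a vk::DescriptorPool `pool` and vk::DescriptorSetLayout
+ // `layout`; names are illustrative):
+ //   vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &layout );
+ //   std::vector<vk::UniqueDescriptorSet> sets = device.allocateDescriptorSetsUnique( allocInfo );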
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::free( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const
+ {
+ d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const
+ {
+ d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
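+ // The ArrayProxy overload above accepts a single element, an initializer list or a
+ // container, so a one-write update needs no explicit counts. Sketch (assuming a prepared
+ // vk::WriteDescriptorSet `write`; the name is illustrative):
+ //   device.updateDescriptorSets( write, nullptr );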
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Framebuffer framebuffer;
+ Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+ return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueFramebuffer>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Framebuffer framebuffer;
+ Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueRenderPass>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d) const
+ {
+ d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d ) const
+ {
+ Extent2D granularity;
+ d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
+ return granularity;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
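+ // In enhanced mode the granularity is returned by value, so the call reads as
+ //   vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );
+ // (assuming a valid vk::RenderPass `renderPass`; the name is illustrative).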
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ CommandPool commandPool;
+ Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+ return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueCommandPool>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ CommandPool commandPool;
+ Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+ return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueCommandBuffer,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+ {
+ static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" );
+ std::vector<UniqueCommandBuffer, Allocator> commandBuffers;
+ commandBuffers.reserve( allocateInfo.commandBufferCount );
+ CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) );
+ Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+
+ PoolFree<Device,CommandPool> deleter( *this, allocateInfo.commandPool );
+ for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
+ {
+ commandBuffers.push_back( UniqueCommandBuffer( buffer[i], deleter ) );
+ }
+
+ return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
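+ // Usage sketch (assuming a vk::CommandPool `commandPool`; names are illustrative). The
+ // Unique handles give their buffers back to the pool via PoolFree<Device, CommandPool>:
+ //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
+ //   std::vector<vk::UniqueCommandBuffer> buffers = device.allocateCommandBuffersUnique( allocInfo );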
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
+ {
+ d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
+ {
+ d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
+ {
+ d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
+ {
+ d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+ return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SwapchainKHR swapchain;
+ Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+ return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueSwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" );
+ std::vector<UniqueSwapchainKHR, Allocator> swapchainKHRs;
+ swapchainKHRs.reserve( createInfos.size() );
+ SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) );
+ Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ for ( size_t i=0 ; i<createInfos.size() ; i++ )
+ {
+ swapchainKHRs.push_back( UniqueSwapchainKHR( buffer[i], deleter ) );
+ }
+
+ return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ }
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSwapchainKHR>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SwapchainKHR swapchain;
+ Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
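+ // createSharedSwapchainKHR / createSharedSwapchainKHRUnique above are single-element
+ // conveniences over vkCreateSharedSwapchainsKHR (VK_KHR_display_swapchain): as the bodies
+ // show, they forward a count of 1 and the address of one create info.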
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SwapchainKHR swapchain;
+ Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+ return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSwapchainKHR>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SwapchainKHR swapchain;
+ Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d ) const
+ {
+ std::vector<Image,Allocator> swapchainImages;
+ uint32_t swapchainImageCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && swapchainImageCount )
+ {
+ swapchainImages.resize( swapchainImageCount );
+ result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ swapchainImages.resize( swapchainImageCount );
+ return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
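+ // Illustrative sketch: the enhanced overload above implements the usual two-call enumeration
+ // idiom (query the count, then the data, retrying while eIncomplete is returned), so with a
+ // valid `device` and `swapchain` a caller can simply write
+ //
+ //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );
+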
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d ) const
+ {
+ uint32_t imageIndex;
+ Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
+ return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
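+ // Illustrative sketch: acquireNextImageKHR returns ResultValue<uint32_t> because eTimeout,
+ // eNotReady and eSuboptimalKHR are listed as success codes above and are therefore not
+ // thrown. `device`, `swapchain` and `imageAvailableSemaphore` are assumed to exist.
+ //
+ //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, nullptr );
+ //   if ( acquired.result == vk::Result::eSuboptimalKHR ) { /* recreate the swapchain soon */ }
+ //   uint32_t imageIndex = acquired.value;
+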
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
+ {
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+ return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueIndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ObjectTableNVX objectTable;
+ Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+ return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueObjectTableNVX>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ObjectTableNVX objectTable;
+ Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() );
+#else
+ if ( pObjectTableEntries.size() != objectIndices.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ Result result = static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
+#else
+ if ( objectEntryTypes.size() != objectIndices.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
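+ // Note (illustrative): for both registerObjectsNVX and unregisterObjectsNVX the two
+ // ArrayProxy arguments must have the same size; in exception-enabled builds a mismatch
+ // throws vk::LogicError, otherwise it is only checked by VULKAN_HPP_ASSERT.
+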
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
+ {
+ d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
+ {
+ d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
+ {
+ d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
+ {
+ d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ {
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+ {
+ MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
+ return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ {
+ int fd;
+ Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
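+ // Illustrative sketch (constructor argument order assumed from this header's conventions):
+ // exporting a POSIX fd for a hypothetical `memory` allocation via the enhanced overload:
+ //
+ //   int fd = device.getMemoryFdKHR( vk::MemoryGetFdInfoKHR( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
+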
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
+ {
+ MemoryFdPropertiesKHR memoryFdProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
+ return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ {
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ {
+ int fd;
+ Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ {
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ {
+ int fd;
+ Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Fence fence;
+ Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Fence fence;
+ Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
+ {
+ uint64_t counterValue;
+ Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
+ return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const
+ {
+ d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const
+ {
+ PeerMemoryFeatureFlags peerMemoryFeatures;
+ d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
+ return peerMemoryFeatures;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const
+ {
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const
+ {
+ PeerMemoryFeatureFlags peerMemoryFeatures;
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
+ return peerMemoryFeatures;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
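+ // Illustrative sketch: ArrayProxy also accepts a braced initializer list, so binding a single
+ // buffer (hypothetical `buffer` and `memory`, offset 0) reduces to one call:
+ //
+ //   device.bindBufferMemory2( { vk::BindBufferMemoryInfo( buffer, memory, 0 ) } );
+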
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+ {
+ DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+ Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
+ return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ DeviceGroupPresentModeFlagsKHR modes;
+ Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
+ return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
+ {
+ uint32_t imageIndex;
+ Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
+ return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+ return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+ return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const
+ {
+ d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const
+ {
+ d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const
+ {
+ d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const
+ {
+ d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d) const
+ {
+ d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
+#else
+ if ( swapchains.size() != metadata.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast<const VkSwapchainKHR*>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT*>( metadata.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
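+ // Note (illustrative): in enhanced mode getSwapchainStatusKHR returns the raw Result rather
+ // than a ResultValueType, with both eSuccess and eSuboptimalKHR accepted without throwing.
+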
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const
+ {
+ RefreshCycleDurationGOOGLE displayTimingProperties;
+ Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
+ return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const
+ {
+ std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
+ uint32_t presentationTimingCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentationTimingCount )
+ {
+ presentationTimings.resize( presentationTimingCount );
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ presentationTimings.resize( presentationTimingCount );
+ return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ MemoryRequirements2 memoryRequirements;
+ d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
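+  // Usage sketch (illustrative only; assumes a vk::Device `device`, a vk::Buffer `buffer`,
+  // and Vulkan 1.1 / VK_KHR_dedicated_allocation support):
+  //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
+  //                                                    vk::MemoryDedicatedRequirements>(
+  //       vk::BufferMemoryRequirementsInfo2( buffer ) );
+  //   vk::Bool32 wantsDedicated =
+  //       chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;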
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ MemoryRequirements2 memoryRequirements;
+ d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ MemoryRequirements2 memoryRequirements;
+ d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
+ d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ MemoryRequirements2 memoryRequirements;
+ d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return memoryRequirements;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
+ d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+ return sparseMemoryRequirements;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const
+ {
+ d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+ {
+ std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+ return sparseMemoryRequirements;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+ return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
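+  // Usage sketch (illustrative only; `createInfo` stands for a filled-in
+  // vk::SamplerYcbcrConversionCreateInfo):
+  //   vk::UniqueSamplerYcbcrConversion conversion =
+  //       device.createSamplerYcbcrConversionUnique( createInfo );
+  // The unique handle calls vkDestroySamplerYcbcrConversion automatically when it goes out
+  // of scope, so no explicit destroy call is required.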
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+ return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d) const
+ {
+ d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const
+ {
+ Queue queue;
+ d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
+ return queue;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
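+  // Usage sketch (illustrative only; the queue family and queue indices are placeholders):
+  //   vk::DeviceQueueInfo2 queueInfo( vk::DeviceQueueCreateFlagBits::eProtected,
+  //                                   /*queueFamilyIndex*/ 0, /*queueIndex*/ 0 );
+  //   vk::Queue queue = device.getQueue2( queueInfo );
+  // getQueue2 is needed for queues created with non-zero VkDeviceQueueCreateFlags
+  // (e.g. protected queues); otherwise the plain getQueue overload suffices.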
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkValidationCacheEXT*>( pValidationCache ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ValidationCacheEXT validationCache;
+ Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+ return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueValidationCacheEXT>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ ValidationCacheEXT validationCache;
+ Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+
+ ObjectDestroy<Device> deleter( *this, allocator );
+ return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d ) const
+ {
+ std::vector<uint8_t,Allocator> data;
+ size_t dataSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+ if ( ( result == Result::eSuccess ) && dataSize )
+ {
+ data.resize( dataSize );
+ result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ data.resize( dataSize );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const
+ {
+ d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+ {
+ DescriptorSetLayoutSupport support;
+ d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+ return support;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ DescriptorSetLayoutSupport& support = structureChain.template get<DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
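+  // Usage sketch (illustrative only; `layoutCreateInfo` stands for the same
+  // vk::DescriptorSetLayoutCreateInfo that would be passed to createDescriptorSetLayout):
+  //   vk::DescriptorSetLayoutSupport support =
+  //       device.getDescriptorSetLayoutSupport( layoutCreateInfo );
+  //   if ( !support.supported ) { /* reduce the binding or descriptor counts */ }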
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const
+ {
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+ {
+ DescriptorSetLayoutSupport support;
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+ return support;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ DescriptorSetLayoutSupport& support = structureChain.template get<DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d ) const
+ {
+ std::vector<uint8_t,Allocator> info;
+ size_t infoSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
+ if ( ( result == Result::eSuccess ) && infoSize )
+ {
+ info.resize( infoSize );
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ info.resize( infoSize );
+ return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( pNameInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( &nameInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( pTagInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
+ {
+ MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
+ return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
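+  // Usage sketch (illustrative only; requires VK_EXT_external_memory_host and a host
+  // allocation `ptr` aligned to minImportedHostPointerAlignment):
+  //   vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
+  //       vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, ptr );
+  //   // props.memoryTypeBits lists the memory types that can import this host pointer.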
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+ {
+ AndroidHardwareBufferPropertiesANDROID properties;
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<StructureChain<T...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<AndroidHardwareBufferPropertiesANDROID>();
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+ return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+ {
+ struct AHardwareBuffer* buffer;
+ Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
+ return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+
+ template <> class UniqueHandleTraits<Device> {public: using deleter = ObjectDestroy<NoParent>; };
+ using UniqueDevice = UniqueHandle<Device>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
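+  // Usage sketch (illustrative only): a vk::UniqueDevice owns the underlying vk::Device and
+  // destroys it on scope exit:
+  //   vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );
+  //   device->waitIdle();   // operator-> forwards to the wrapped vk::Device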
+
+ class PhysicalDevice
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR PhysicalDevice()
+ : m_physicalDevice(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t )
+ : m_physicalDevice(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice )
+ : m_physicalDevice( physicalDevice )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ PhysicalDevice & operator=(VkPhysicalDevice physicalDevice)
+ {
+ m_physicalDevice = physicalDevice;
+ return *this;
+ }
+#endif
+
+ PhysicalDevice & operator=( std::nullptr_t )
+ {
+ m_physicalDevice = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( PhysicalDevice const & rhs ) const
+ {
+ return m_physicalDevice == rhs.m_physicalDevice;
+ }
+
+ bool operator!=(PhysicalDevice const & rhs ) const
+ {
+ return m_physicalDevice != rhs.m_physicalDevice;
+ }
+
+ bool operator<(PhysicalDevice const & rhs ) const
+ {
+ return m_physicalDevice < rhs.m_physicalDevice;
+ }
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceProperties getProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceFeatures getFeatures(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ FormatProperties getFormatProperties( Format format, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDevice>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = Dispatch() ) const;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getFeatures2(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
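+    // Usage sketch (illustrative only): query core features together with an extension
+    // feature structure in a single call via a StructureChain:
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                            vk::PhysicalDevice16BitStorageFeatures>();
+    //   vk::Bool32 storage16 =
+    //       chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;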
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getFeatures2KHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceProperties2 getProperties2(Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getProperties2(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ StructureChain<T...> getProperties2KHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ FormatProperties2 getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ FormatProperties2 getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<StructureChain<T...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<StructureChain<T...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic>
+ std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
+#else
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<void>::type releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Display>::type acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<Rect2D>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ MultisamplePropertiesEXT getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
+ template <typename ...T, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<StructureChain<T...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const
+ {
+ return m_physicalDevice;
+ }
+
+ explicit operator bool() const
+ {
+ return m_physicalDevice != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_physicalDevice == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkPhysicalDevice m_physicalDevice;
+ };
+
+ static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const
+ {
+ PhysicalDeviceProperties properties;
+ d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
+ {
+ std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+ return queueFamilyProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const
+ {
+ PhysicalDeviceMemoryProperties memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+ return memoryProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const
+ {
+ PhysicalDeviceFeatures features;
+ d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
+ return features;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format, Dispatch const &d ) const
+ {
+ FormatProperties formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
+ return formatProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d ) const
+ {
+ ImageFormatProperties imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
+ return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Device device;
+ Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+ return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDevice>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ Device device;
+ Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+
+ ObjectDestroy<NoParent> deleter( allocator );
+ return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
+ {
+ std::vector<LayerProperties,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
+ {
+ std::vector<ExtensionProperties,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d ) const
+ {
+ std::vector<SparseImageFormatProperties,Allocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
+ {
+ std::vector<DisplayPropertiesKHR,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
+ {
+ std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
+ {
+ std::vector<DisplayKHR,Allocator> displays;
+ uint32_t displayCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && displayCount )
+ {
+ displays.resize( displayCount );
+ result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+ displays.resize( displayCount );
+ return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d ) const
+ {
+ std::vector<DisplayModePropertiesKHR,Allocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( propertyCount );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DisplayModeKHR mode;
+ Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
+ return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
+ {
+ DisplayPlaneCapabilitiesKHR capabilities;
+ Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
+ return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection, Dispatch const &d) const
+ {
+ return d.vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection, Dispatch const &d ) const
+ {
+ return d.vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d ) const
+ {
+ Bool32 supported;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
+ return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ SurfaceCapabilitiesKHR surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
+ return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ surfaceFormats.resize( surfaceFormatCount );
+ return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ std::vector<PresentModeKHR,Allocator> presentModes;
+ uint32_t presentModeCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentModeCount )
+ {
+ presentModes.resize( presentModeCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ presentModes.resize( presentModeCount );
+ return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const
+ {
+ return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const
+ {
+ return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const
+ {
+ return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const
+ {
+ return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const
+ {
+ return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const
+ {
+ return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const
+ {
+ return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const
+ {
+ return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
+ {
+ ExternalImageFormatPropertiesNV externalImageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
+ return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const
+ {
+ DeviceGeneratedCommandsLimitsNVX limits;
+ d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+ return limits;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const
+ {
+ PhysicalDeviceFeatures2 features;
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+ return features;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> PhysicalDevice::getFeatures2(Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ PhysicalDeviceFeatures2& features = structureChain.template get<PhysicalDeviceFeatures2>();
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const
+ {
+ PhysicalDeviceFeatures2 features;
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+ return features;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ PhysicalDeviceFeatures2& features = structureChain.template get<PhysicalDeviceFeatures2>();
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const
+ {
+ PhysicalDeviceProperties2 properties;
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+ return properties;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> PhysicalDevice::getProperties2(Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ PhysicalDeviceProperties2& properties = structureChain.template get<PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const
+ {
+ PhysicalDeviceProperties2 properties;
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+ return properties;
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE StructureChain<T...> PhysicalDevice::getProperties2KHR(Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ PhysicalDeviceProperties2& properties = structureChain.template get<PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const
+ {
+ FormatProperties2 formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+ return formatProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const &d ) const
+ {
+ FormatProperties2 formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+ return formatProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ {
+ ImageFormatProperties2 imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+ return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<StructureChain<T...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ ImageFormatProperties2& imageFormatProperties = structureChain.template get<ImageFormatProperties2>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+ return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ {
+ ImageFormatProperties2 imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+ return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<StructureChain<T...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ ImageFormatProperties2& imageFormatProperties = structureChain.template get<ImageFormatProperties2>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+ return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+ {
+ std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ return queueFamilyProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+ {
+ std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ return queueFamilyProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const
+ {
+ PhysicalDeviceMemoryProperties2 memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+ return memoryProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const
+ {
+ PhysicalDeviceMemoryProperties2 memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+ return memoryProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+ {
+ std::vector<SparseImageFormatProperties2,Allocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+ {
+ std::vector<SparseImageFormatProperties2,Allocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const
+ {
+ ExternalBufferProperties externalBufferProperties;
+ d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
+ return externalBufferProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const
+ {
+ ExternalBufferProperties externalBufferProperties;
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
+ return externalBufferProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const
+ {
+ ExternalSemaphoreProperties externalSemaphoreProperties;
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
+ return externalSemaphoreProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const
+ {
+ ExternalSemaphoreProperties externalSemaphoreProperties;
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
+ return externalSemaphoreProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const
+ {
+ ExternalFenceProperties externalFenceProperties;
+ d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
+ return externalFenceProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const
+ {
+ ExternalFenceProperties externalFenceProperties;
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
+ return externalFenceProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d ) const
+ {
+ Display dpy;
+ Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+ return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
+ {
+ DisplayKHR display;
+ Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+ return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ SurfaceCapabilities2EXT surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
+ return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d ) const
+ {
+ std::vector<Rect2D,Allocator> rects;
+ uint32_t rectCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && rectCount )
+ {
+ rects.resize( rectCount );
+ result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+ rects.resize( rectCount );
+ return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const
+ {
+ d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d ) const
+ {
+ MultisamplePropertiesEXT multisampleProperties;
+ d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
+ return multisampleProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ {
+ SurfaceCapabilities2KHR surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+ return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ }
+ template <typename ...T, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<StructureChain<T...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ {
+ StructureChain<T...> structureChain;
+ SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<SurfaceCapabilities2KHR>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+ return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ {
+ std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ surfaceFormats.resize( surfaceFormatCount );
+ return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
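+
+  // The enhanced-mode wrappers above follow Vulkan's two-call enumeration idiom:
+  // query the element count with a null array pointer, size the vector, then
+  // fetch the data, retrying while the implementation reports Result::eIncomplete.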
+
+ struct CmdProcessCommandsInfoNVX
+ {
+ CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
+ : objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+ , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+ , maxSequencesCount( maxSequencesCount_ )
+ , targetCommandBuffer( targetCommandBuffer_ )
+ , sequencesCountBuffer( sequencesCountBuffer_ )
+ , sequencesCountOffset( sequencesCountOffset_ )
+ , sequencesIndexBuffer( sequencesIndexBuffer_ )
+ , sequencesIndexOffset( sequencesIndexOffset_ )
+ {
+ }
+
+ CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
+ }
+
+ CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
+ return *this;
+ }
+ CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
+ {
+ objectTable = objectTable_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
+ {
+ indirectCommandsLayout = indirectCommandsLayout_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
+ {
+ indirectCommandsTokenCount = indirectCommandsTokenCount_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
+ {
+ pIndirectCommandsTokens = pIndirectCommandsTokens_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+ {
+ maxSequencesCount = maxSequencesCount_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
+ {
+ targetCommandBuffer = targetCommandBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
+ {
+ sequencesCountBuffer = sequencesCountBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
+ {
+ sequencesCountOffset = sequencesCountOffset_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
+ {
+ sequencesIndexBuffer = sequencesIndexBuffer_;
+ return *this;
+ }
+
+ CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
+ {
+ sequencesIndexOffset = sequencesIndexOffset_;
+ return *this;
+ }
+
+ operator const VkCmdProcessCommandsInfoNVX&() const
+ {
+ return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
+ }
+
+ bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( objectTable == rhs.objectTable )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+ && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+ && ( maxSequencesCount == rhs.maxSequencesCount )
+ && ( targetCommandBuffer == rhs.targetCommandBuffer )
+ && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+ && ( sequencesCountOffset == rhs.sequencesCountOffset )
+ && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+ && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+ }
+
+ bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
+
+ public:
+ const void* pNext = nullptr;
+ ObjectTableNVX objectTable;
+ IndirectCommandsLayoutNVX indirectCommandsLayout;
+ uint32_t indirectCommandsTokenCount;
+ const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
+ uint32_t maxSequencesCount;
+ CommandBuffer targetCommandBuffer;
+ Buffer sequencesCountBuffer;
+ DeviceSize sequencesCountOffset;
+ Buffer sequencesIndexBuffer;
+ DeviceSize sequencesIndexOffset;
+ };
+ static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
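+
+  // Each setter above returns *this, so a CmdProcessCommandsInfoNVX can be
+  // populated by chaining calls; a minimal sketch, assuming the default vk
+  // namespace and valid objectTable / indirectCommandsLayout handles obtained
+  // elsewhere:
+  //
+  //   vk::CmdProcessCommandsInfoNVX processInfo = vk::CmdProcessCommandsInfoNVX()
+  //     .setObjectTable( objectTable )
+  //     .setIndirectCommandsLayout( indirectCommandsLayout )
+  //     .setMaxSequencesCount( 1 );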
+
+ struct PhysicalDeviceGroupProperties
+ {
+ operator const VkPhysicalDeviceGroupProperties&() const
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>(this);
+ }
+
+ bool operator==( PhysicalDeviceGroupProperties const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( physicalDeviceCount == rhs.physicalDeviceCount )
+ && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( PhysicalDevice ) ) == 0 )
+ && ( subsetAllocation == rhs.subsetAllocation );
+ }
+
+ bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+
+ public:
+ void* pNext = nullptr;
+ uint32_t physicalDeviceCount;
+ PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+ Bool32 subsetAllocation;
+ };
+ static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
+
+ using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ class Instance;
+
+ template <> class UniqueHandleTraits<DebugReportCallbackEXT> {public: using deleter = ObjectDestroy<Instance>; };
+ using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT>;
+ template <> class UniqueHandleTraits<DebugUtilsMessengerEXT> {public: using deleter = ObjectDestroy<Instance>; };
+ using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT>;
+ template <> class UniqueHandleTraits<SurfaceKHR> {public: using deleter = ObjectDestroy<Instance>; };
+ using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+ class Instance
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR Instance()
+ : m_instance(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t )
+ : m_instance(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance )
+ : m_instance( instance )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Instance & operator=(VkInstance instance)
+ {
+ m_instance = instance;
+ return *this;
+ }
+#endif
+
+ Instance & operator=( std::nullptr_t )
+ {
+ m_instance = VK_NULL_HANDLE;
+ return *this;
+ }
+
+ bool operator==( Instance const & rhs ) const
+ {
+ return m_instance == rhs.m_instance;
+ }
+
+ bool operator!=(Instance const & rhs ) const
+ {
+ return m_instance != rhs.m_instance;
+ }
+
+ bool operator<(Instance const & rhs ) const
+ {
+ return m_instance < rhs.m_instance;
+ }
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDebugReportCallbackEXT>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic>
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueSurfaceKHR>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueDebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void destroy( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const
+ {
+ return m_instance;
+ }
+
+ explicit operator bool() const
+ {
+ return m_instance != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const
+ {
+ return m_instance == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkInstance m_instance;
+ };
+
+ static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+ {
+ std::vector<PhysicalDevice,Allocator> physicalDevices;
+ uint32_t physicalDeviceCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ {
+ physicalDevices.resize( physicalDeviceCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ physicalDevices.resize( physicalDeviceCount );
+ return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
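+
+  // A minimal usage sketch, assuming the default vk namespace, the default
+  // DispatchLoaderStatic dispatcher, exceptions enabled (so the enhanced
+  // wrapper returns the vector directly), and an already created vk::Instance
+  // named "instance":
+  //
+  //   std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
+  //   for ( vk::PhysicalDevice const & physicalDevice : physicalDevices )
+  //   {
+  //     vk::PhysicalDeviceMemoryProperties2 memoryProperties = physicalDevice.getMemoryProperties2();
+  //   }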
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const
+ {
+ return d.vkGetInstanceProcAddr( m_instance, pName );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const
+ {
+ return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMirSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMirSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
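+
+ // Illustrative sketch (not part of the generated header): every platform
+ // surface factory above follows the same pattern.  Taking XCB as an example,
+ // and assuming VK_USE_PLATFORM_XCB_KHR is defined, the VK_KHR_xcb_surface
+ // extension was enabled on the instance, and "connection"/"window" are a
+ // valid xcb_connection_t* and xcb_window_t supplied by the application:
+ //
+ //   vk::XcbSurfaceCreateInfoKHR createInfo;
+ //   createInfo.setConnection( connection ).setWindow( window );
+ //   vk::SurfaceKHR surface = instance.createXcbSurfaceKHR( createInfo );
+ //   // ... and later, once the surface is no longer needed:
+ //   instance.destroy( surface );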
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DebugReportCallbackEXT callback;
+ Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+ return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DebugReportCallbackEXT callback;
+ Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const
+ {
+ d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() );
+#else
+ if ( layerPrefix.size() != message.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
+ }
+#endif // VULKAN_HPP_NO_EXCEPTIONS
+ d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+ {
+ std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
+ {
+ std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
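+
+ // Illustrative sketch (not part of the generated header): with enhanced mode
+ // enabled (and exceptions on), the vector-returning overload drives the
+ // count/query loop above, so enumerating device groups on a Vulkan 1.1
+ // instance reduces to:
+ //
+ //   std::vector<vk::PhysicalDeviceGroupProperties> groups =
+ //       instance.enumeratePhysicalDeviceGroups();
+ //   for ( auto const & group : groups )
+ //   {
+ //     // group.physicalDeviceCount physical devices can back one logical device
+ //   }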
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueSurfaceKHR>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const
+ {
+ return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( pMessenger ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DebugUtilsMessengerEXT messenger;
+ Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+ return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueDebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ DebugUtilsMessengerEXT messenger;
+ Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+
+ ObjectDestroy<Instance> deleter( *this, allocator );
+ return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
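+
+ // Illustrative sketch (not part of the generated header): creating a debug
+ // messenger through the enhanced-mode wrapper above.  This assumes the
+ // VK_EXT_debug_utils extension was enabled on the instance, "dispatch" is a
+ // dispatcher whose vkCreateDebugUtilsMessengerEXT / vkDestroyDebugUtilsMessengerEXT
+ // pointers have been loaded (these extension entry points are not exported by
+ // the loader), and "myCallback" is an application-supplied
+ // PFN_vkDebugUtilsMessengerCallbackEXT:
+ //
+ //   vk::DebugUtilsMessengerCreateInfoEXT createInfo;
+ //   createInfo.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning
+ //                              | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
+ //   createInfo.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation;
+ //   createInfo.pfnUserCallback = myCallback;
+ //   vk::DebugUtilsMessengerEXT messenger =
+ //       instance.createDebugUtilsMessengerEXT( createInfo, nullptr, dispatch );
+ //   // ... and on shutdown:
+ //   instance.destroy( messenger, nullptr, dispatch );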
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+ {
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const
+ {
+ d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const
+ {
+ d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ struct DeviceGroupDeviceCreateInfo
+ {
+ DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0, const PhysicalDevice* pPhysicalDevices_ = nullptr )
+ : physicalDeviceCount( physicalDeviceCount_ )
+ , pPhysicalDevices( pPhysicalDevices_ )
+ {
+ }
+
+ DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
+ }
+
+ DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs )
+ {
+ memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
+ return *this;
+ }
+ DeviceGroupDeviceCreateInfo& setPNext( const void* pNext_ )
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DeviceGroupDeviceCreateInfo& setPhysicalDeviceCount( uint32_t physicalDeviceCount_ )
+ {
+ physicalDeviceCount = physicalDeviceCount_;
+ return *this;
+ }
+
+ DeviceGroupDeviceCreateInfo& setPPhysicalDevices( const PhysicalDevice* pPhysicalDevices_ )
+ {
+ pPhysicalDevices = pPhysicalDevices_;
+ return *this;
+ }
+
+ operator const VkDeviceGroupDeviceCreateInfo&() const
+ {
+ return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>(this);
+ }
+
+ bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( physicalDeviceCount == rhs.physicalDeviceCount )
+ && ( pPhysicalDevices == rhs.pPhysicalDevices );
+ }
+
+ bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const
+ {
+ return !operator==( rhs );
+ }
+
+ private:
+ StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+
+ public:
+ const void* pNext = nullptr;
+ uint32_t physicalDeviceCount;
+ const PhysicalDevice* pPhysicalDevices;
+ };
+ static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
+
+ using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
+
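+ // Illustrative sketch (not part of the generated header): DeviceGroupDeviceCreateInfo
+ // is consumed by chaining it into a DeviceCreateInfo through pNext (the
+ // isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>
+ // specialization further below records that this pairing is legal).  Here
+ // "groupProps" is a placeholder for one element returned by
+ // Instance::enumeratePhysicalDeviceGroups():
+ //
+ //   vk::DeviceGroupDeviceCreateInfo groupInfo;
+ //   groupInfo.setPhysicalDeviceCount( groupProps.physicalDeviceCount )
+ //            .setPPhysicalDevices( groupProps.physicalDevices );
+ //   vk::DeviceCreateInfo deviceInfo;
+ //   deviceInfo.setPNext( &groupInfo );
+ //   // deviceInfo can now be passed to PhysicalDevice::createDevice().
+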
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+
+ template <> class UniqueHandleTraits<Instance> {public: using deleter = ObjectDestroy<NoParent>; };
+ using UniqueInstance = UniqueHandle<Instance>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+ template<typename Dispatch = DispatchLoaderStatic>
+ Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d = Dispatch() );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = DispatchLoaderStatic>
+ ResultValueType<UniqueInstance>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d)
+ {
+ return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+ {
+ Instance instance;
+ Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+ return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE ResultValueType<UniqueInstance>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+ {
+ Instance instance;
+ Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+
+ ObjectDestroy<NoParent> deleter( allocator );
+ return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
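+
+ // Illustrative sketch (not part of the generated header): with enhanced mode,
+ // smart handles, and exceptions enabled, and with "vk" as the default
+ // VULKAN_HPP_NAMESPACE, the free functions above reduce instance creation to:
+ //
+ //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_1 );
+ //   vk::InstanceCreateInfo createInfo;
+ //   createInfo.setPApplicationInfo( &appInfo );
+ //   vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );
+ //   // the underlying VkInstance is destroyed when "instance" goes out of scope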
+
+
+ template <> struct isStructureChainValid<PresentInfoKHR, DisplayPresentInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, DedicatedAllocationImageCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BufferCreateInfo, DedicatedAllocationBufferCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, DedicatedAllocationMemoryAllocateInfoNV>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePushDescriptorPropertiesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PresentInfoKHR, PresentRegionsKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVariablePointerFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVariablePointerFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceIDProperties>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<SubmitInfo, D3D12FenceSubmitInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassMultiviewCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindBufferMemoryInfo, BindBufferMemoryDeviceGroupInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemoryDeviceGroupInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassBeginInfo, DeviceGroupRenderPassBeginInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<CommandBufferBeginInfo, DeviceGroupCommandBufferBeginInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SubmitInfo, DeviceGroupSubmitInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindSparseInfo, DeviceGroupBindSparseInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ImageSwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemorySwapchainInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PresentInfoKHR, PresentTimesInfoGOOGLE>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportWScalingStateCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDiscardRectanglePropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryRequirements2, MemoryDedicatedRequirements>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryDedicatedAllocateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SamplerCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageViewCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageFormatProperties2, SamplerYcbcrConversionImageFormatProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageFormatProperties2, TextureLODGatherFormatPropertiesAMD>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SubmitInfo, ProtectedSubmitInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceProtectedMemoryProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageToColorStateCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceBlendOperationAdvancedPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ShaderModuleCreateInfo, ShaderModuleValidationCacheCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMaintenance3Properties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDrawParameterFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceExternalMemoryHostPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceConservativeRasterizationPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCorePropertiesAMD>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupportEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineVertexInputStateCreateInfo, PipelineVertexInputDivisorStateCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVertexAttributeDivisorPropertiesEXT>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ImportAndroidHardwareBufferInfoANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template <> struct isStructureChainValid<ImageFormatProperties2, AndroidHardwareBufferUsageANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template <> struct isStructureChainValid<ImageCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SamplerYcbcrConversionCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+ template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SharedPresentSurfaceCapabilitiesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewUsageCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassInputAttachmentAspectCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindImageMemoryInfo, BindImagePlaneMemoryInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageMemoryRequirementsInfo2, ImagePlaneMemoryRequirementsInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageMemoryBarrier, SampleLocationsInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassSampleLocationsBeginInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineSampleLocationsStateCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSampleLocationsPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<InstanceCreateInfo, DebugReportCallbackCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateRasterizationOrderAMD>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfoNV>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+ template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFlagsEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceExternalImageFormatInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BufferCreateInfo, ExternalMemoryBufferCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryFdInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryHostPointerInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageFormatProperties2, ExternalImageFormatProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainCounterCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryAllocateFlagsInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PresentInfoKHR, DeviceGroupPresentInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SwapchainCreateInfoKHR, DeviceGroupSwapchainCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportSwizzleStateCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineDiscardRectangleStateCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePointClippingProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineTessellationStateCreateInfo, PipelineTessellationDomainOriginStateCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+ template <> struct isStructureChainValid<AndroidHardwareBufferPropertiesANDROID, AndroidHardwareBufferFormatPropertiesANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+ template <> struct isStructureChainValid<PipelineColorBlendStateCreateInfo, PipelineColorBlendAdvancedStateCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageModulationStateCreateInfoNV>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceQueueCreateInfo, DeviceQueueGlobalPriorityCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<InstanceCreateInfo, DebugUtilsMessengerCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationConservativeStateCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>{ enum { value = true }; };
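+
+ // Illustrative sketch (not part of the generated header): the specializations
+ // above record which structures may extend which base structures, and
+ // vk::StructureChain uses them to reject invalid pairings at compile time.
+ // For example, DeviceCreateInfo extended by PhysicalDeviceFeatures2 is listed
+ // above, so the following compiles and the pNext pointers are wired
+ // automatically:
+ //
+ //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceFeatures2> chain;
+ //   chain.get<vk::PhysicalDeviceFeatures2>().features.samplerAnisotropy = VK_TRUE;
+ //   const vk::DeviceCreateInfo & deviceInfo = chain.get<vk::DeviceCreateInfo>();
+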
+ VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+ VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagBitsMVK)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagsMVK)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagBitsMVK)
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagsMVK)
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBits)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlags)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagBitsNV)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagsNV)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagBitsEXT)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagsEXT)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagBitsNV)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagsNV)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagBitsNV)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagsNV)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagBitsEXT)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagsEXT)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagBitsEXT)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagsEXT)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagBitsEXT)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagsEXT)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagBitsEXT)
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagsEXT)
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
+ {
+ switch (value)
+ {
+ case ImageLayout::eUndefined: return "Undefined";
+ case ImageLayout::eGeneral: return "General";
+ case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
+ case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
+ case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
+ case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
+ case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
+ case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
+ case ImageLayout::ePreinitialized: return "Preinitialized";
+ case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
+ case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
+ case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
+ case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
+ {
+ switch (value)
+ {
+ case AttachmentLoadOp::eLoad: return "Load";
+ case AttachmentLoadOp::eClear: return "Clear";
+ case AttachmentLoadOp::eDontCare: return "DontCare";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
+ {
+ switch (value)
+ {
+ case AttachmentStoreOp::eStore: return "Store";
+ case AttachmentStoreOp::eDontCare: return "DontCare";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageType value)
+ {
+ switch (value)
+ {
+ case ImageType::e1D: return "1D";
+ case ImageType::e2D: return "2D";
+ case ImageType::e3D: return "3D";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
+ {
+ switch (value)
+ {
+ case ImageTiling::eOptimal: return "Optimal";
+ case ImageTiling::eLinear: return "Linear";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
+ {
+ switch (value)
+ {
+ case ImageViewType::e1D: return "1D";
+ case ImageViewType::e2D: return "2D";
+ case ImageViewType::e3D: return "3D";
+ case ImageViewType::eCube: return "Cube";
+ case ImageViewType::e1DArray: return "1DArray";
+ case ImageViewType::e2DArray: return "2DArray";
+ case ImageViewType::eCubeArray: return "CubeArray";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
+ {
+ switch (value)
+ {
+ case CommandBufferLevel::ePrimary: return "Primary";
+ case CommandBufferLevel::eSecondary: return "Secondary";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
+ {
+ switch (value)
+ {
+ case ComponentSwizzle::eIdentity: return "Identity";
+ case ComponentSwizzle::eZero: return "Zero";
+ case ComponentSwizzle::eOne: return "One";
+ case ComponentSwizzle::eR: return "R";
+ case ComponentSwizzle::eG: return "G";
+ case ComponentSwizzle::eB: return "B";
+ case ComponentSwizzle::eA: return "A";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
+ {
+ switch (value)
+ {
+ case DescriptorType::eSampler: return "Sampler";
+ case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
+ case DescriptorType::eSampledImage: return "SampledImage";
+ case DescriptorType::eStorageImage: return "StorageImage";
+ case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case DescriptorType::eUniformBuffer: return "UniformBuffer";
+ case DescriptorType::eStorageBuffer: return "StorageBuffer";
+ case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
+ case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
+ case DescriptorType::eInputAttachment: return "InputAttachment";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryType value)
+ {
+ switch (value)
+ {
+ case QueryType::eOcclusion: return "Occlusion";
+ case QueryType::ePipelineStatistics: return "PipelineStatistics";
+ case QueryType::eTimestamp: return "Timestamp";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BorderColor value)
+ {
+ switch (value)
+ {
+ case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
+ case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
+ case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
+ case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
+ case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
+ case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
+ {
+ switch (value)
+ {
+ case PipelineBindPoint::eGraphics: return "Graphics";
+ case PipelineBindPoint::eCompute: return "Compute";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
+ {
+ switch (value)
+ {
+ case PipelineCacheHeaderVersion::eOne: return "One";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
+ {
+ switch (value)
+ {
+ case PrimitiveTopology::ePointList: return "PointList";
+ case PrimitiveTopology::eLineList: return "LineList";
+ case PrimitiveTopology::eLineStrip: return "LineStrip";
+ case PrimitiveTopology::eTriangleList: return "TriangleList";
+ case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
+ case PrimitiveTopology::eTriangleFan: return "TriangleFan";
+ case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
+ case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
+ case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
+ case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
+ case PrimitiveTopology::ePatchList: return "PatchList";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SharingMode value)
+ {
+ switch (value)
+ {
+ case SharingMode::eExclusive: return "Exclusive";
+ case SharingMode::eConcurrent: return "Concurrent";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(IndexType value)
+ {
+ switch (value)
+ {
+ case IndexType::eUint16: return "Uint16";
+ case IndexType::eUint32: return "Uint32";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(Filter value)
+ {
+ switch (value)
+ {
+ case Filter::eNearest: return "Nearest";
+ case Filter::eLinear: return "Linear";
+ case Filter::eCubicIMG: return "CubicIMG";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
+ {
+ switch (value)
+ {
+ case SamplerMipmapMode::eNearest: return "Nearest";
+ case SamplerMipmapMode::eLinear: return "Linear";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
+ {
+ switch (value)
+ {
+ case SamplerAddressMode::eRepeat: return "Repeat";
+ case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
+ case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
+ case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
+ case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CompareOp value)
+ {
+ switch (value)
+ {
+ case CompareOp::eNever: return "Never";
+ case CompareOp::eLess: return "Less";
+ case CompareOp::eEqual: return "Equal";
+ case CompareOp::eLessOrEqual: return "LessOrEqual";
+ case CompareOp::eGreater: return "Greater";
+ case CompareOp::eNotEqual: return "NotEqual";
+ case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
+ case CompareOp::eAlways: return "Always";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
+ {
+ switch (value)
+ {
+ case PolygonMode::eFill: return "Fill";
+ case PolygonMode::eLine: return "Line";
+ case PolygonMode::ePoint: return "Point";
+ case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
+ {
+ switch (value)
+ {
+ case CullModeFlagBits::eNone: return "None";
+ case CullModeFlagBits::eFront: return "Front";
+ case CullModeFlagBits::eBack: return "Back";
+ case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CullModeFlagBits::eNone) result += "None | ";
+ if (value & CullModeFlagBits::eFront) result += "Front | ";
+ if (value & CullModeFlagBits::eBack) result += "Back | ";
+ if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FrontFace value)
+ {
+ switch (value)
+ {
+ case FrontFace::eCounterClockwise: return "CounterClockwise";
+ case FrontFace::eClockwise: return "Clockwise";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
+ {
+ switch (value)
+ {
+ case BlendFactor::eZero: return "Zero";
+ case BlendFactor::eOne: return "One";
+ case BlendFactor::eSrcColor: return "SrcColor";
+ case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
+ case BlendFactor::eDstColor: return "DstColor";
+ case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
+ case BlendFactor::eSrcAlpha: return "SrcAlpha";
+ case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
+ case BlendFactor::eDstAlpha: return "DstAlpha";
+ case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
+ case BlendFactor::eConstantColor: return "ConstantColor";
+ case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
+ case BlendFactor::eConstantAlpha: return "ConstantAlpha";
+ case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
+ case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
+ case BlendFactor::eSrc1Color: return "Src1Color";
+ case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
+ case BlendFactor::eSrc1Alpha: return "Src1Alpha";
+ case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BlendOp value)
+ {
+ switch (value)
+ {
+ case BlendOp::eAdd: return "Add";
+ case BlendOp::eSubtract: return "Subtract";
+ case BlendOp::eReverseSubtract: return "ReverseSubtract";
+ case BlendOp::eMin: return "Min";
+ case BlendOp::eMax: return "Max";
+ case BlendOp::eZeroEXT: return "ZeroEXT";
+ case BlendOp::eSrcEXT: return "SrcEXT";
+ case BlendOp::eDstEXT: return "DstEXT";
+ case BlendOp::eSrcOverEXT: return "SrcOverEXT";
+ case BlendOp::eDstOverEXT: return "DstOverEXT";
+ case BlendOp::eSrcInEXT: return "SrcInEXT";
+ case BlendOp::eDstInEXT: return "DstInEXT";
+ case BlendOp::eSrcOutEXT: return "SrcOutEXT";
+ case BlendOp::eDstOutEXT: return "DstOutEXT";
+ case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
+ case BlendOp::eDstAtopEXT: return "DstAtopEXT";
+ case BlendOp::eXorEXT: return "XorEXT";
+ case BlendOp::eMultiplyEXT: return "MultiplyEXT";
+ case BlendOp::eScreenEXT: return "ScreenEXT";
+ case BlendOp::eOverlayEXT: return "OverlayEXT";
+ case BlendOp::eDarkenEXT: return "DarkenEXT";
+ case BlendOp::eLightenEXT: return "LightenEXT";
+ case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
+ case BlendOp::eColorburnEXT: return "ColorburnEXT";
+ case BlendOp::eHardlightEXT: return "HardlightEXT";
+ case BlendOp::eSoftlightEXT: return "SoftlightEXT";
+ case BlendOp::eDifferenceEXT: return "DifferenceEXT";
+ case BlendOp::eExclusionEXT: return "ExclusionEXT";
+ case BlendOp::eInvertEXT: return "InvertEXT";
+ case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
+ case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
+ case BlendOp::eLinearburnEXT: return "LinearburnEXT";
+ case BlendOp::eVividlightEXT: return "VividlightEXT";
+ case BlendOp::eLinearlightEXT: return "LinearlightEXT";
+ case BlendOp::ePinlightEXT: return "PinlightEXT";
+ case BlendOp::eHardmixEXT: return "HardmixEXT";
+ case BlendOp::eHslHueEXT: return "HslHueEXT";
+ case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
+ case BlendOp::eHslColorEXT: return "HslColorEXT";
+ case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
+ case BlendOp::ePlusEXT: return "PlusEXT";
+ case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
+ case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
+ case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
+ case BlendOp::eMinusEXT: return "MinusEXT";
+ case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
+ case BlendOp::eContrastEXT: return "ContrastEXT";
+ case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
+ case BlendOp::eRedEXT: return "RedEXT";
+ case BlendOp::eGreenEXT: return "GreenEXT";
+ case BlendOp::eBlueEXT: return "BlueEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(StencilOp value)
+ {
+ switch (value)
+ {
+ case StencilOp::eKeep: return "Keep";
+ case StencilOp::eZero: return "Zero";
+ case StencilOp::eReplace: return "Replace";
+ case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
+ case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
+ case StencilOp::eInvert: return "Invert";
+ case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
+ case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(LogicOp value)
+ {
+ switch (value)
+ {
+ case LogicOp::eClear: return "Clear";
+ case LogicOp::eAnd: return "And";
+ case LogicOp::eAndReverse: return "AndReverse";
+ case LogicOp::eCopy: return "Copy";
+ case LogicOp::eAndInverted: return "AndInverted";
+ case LogicOp::eNoOp: return "NoOp";
+ case LogicOp::eXor: return "Xor";
+ case LogicOp::eOr: return "Or";
+ case LogicOp::eNor: return "Nor";
+ case LogicOp::eEquivalent: return "Equivalent";
+ case LogicOp::eInvert: return "Invert";
+ case LogicOp::eOrReverse: return "OrReverse";
+ case LogicOp::eCopyInverted: return "CopyInverted";
+ case LogicOp::eOrInverted: return "OrInverted";
+ case LogicOp::eNand: return "Nand";
+ case LogicOp::eSet: return "Set";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
+ {
+ switch (value)
+ {
+ case InternalAllocationType::eExecutable: return "Executable";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
+ {
+ switch (value)
+ {
+ case SystemAllocationScope::eCommand: return "Command";
+ case SystemAllocationScope::eObject: return "Object";
+ case SystemAllocationScope::eCache: return "Cache";
+ case SystemAllocationScope::eDevice: return "Device";
+ case SystemAllocationScope::eInstance: return "Instance";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
+ {
+ switch (value)
+ {
+ case PhysicalDeviceType::eOther: return "Other";
+ case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
+ case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
+ case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
+ case PhysicalDeviceType::eCpu: return "Cpu";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
+ {
+ switch (value)
+ {
+ case VertexInputRate::eVertex: return "Vertex";
+ case VertexInputRate::eInstance: return "Instance";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(Format value)
+ {
+ switch (value)
+ {
+ case Format::eUndefined: return "Undefined";
+ case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
+ case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
+ case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
+ case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
+ case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
+ case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
+ case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
+ case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
+ case Format::eR8Unorm: return "R8Unorm";
+ case Format::eR8Snorm: return "R8Snorm";
+ case Format::eR8Uscaled: return "R8Uscaled";
+ case Format::eR8Sscaled: return "R8Sscaled";
+ case Format::eR8Uint: return "R8Uint";
+ case Format::eR8Sint: return "R8Sint";
+ case Format::eR8Srgb: return "R8Srgb";
+ case Format::eR8G8Unorm: return "R8G8Unorm";
+ case Format::eR8G8Snorm: return "R8G8Snorm";
+ case Format::eR8G8Uscaled: return "R8G8Uscaled";
+ case Format::eR8G8Sscaled: return "R8G8Sscaled";
+ case Format::eR8G8Uint: return "R8G8Uint";
+ case Format::eR8G8Sint: return "R8G8Sint";
+ case Format::eR8G8Srgb: return "R8G8Srgb";
+ case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
+ case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
+ case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
+ case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
+ case Format::eR8G8B8Uint: return "R8G8B8Uint";
+ case Format::eR8G8B8Sint: return "R8G8B8Sint";
+ case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
+ case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
+ case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
+ case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
+ case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
+ case Format::eB8G8R8Uint: return "B8G8R8Uint";
+ case Format::eB8G8R8Sint: return "B8G8R8Sint";
+ case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
+ case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
+ case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
+ case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
+ case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
+ case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
+ case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
+ case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
+ case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
+ case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
+ case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
+ case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
+ case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
+ case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
+ case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
+ case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
+ case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
+ case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
+ case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
+ case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
+ case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
+ case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
+ case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
+ case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
+ case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
+ case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
+ case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
+ case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
+ case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
+ case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
+ case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
+ case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
+ case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
+ case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
+ case Format::eR16Unorm: return "R16Unorm";
+ case Format::eR16Snorm: return "R16Snorm";
+ case Format::eR16Uscaled: return "R16Uscaled";
+ case Format::eR16Sscaled: return "R16Sscaled";
+ case Format::eR16Uint: return "R16Uint";
+ case Format::eR16Sint: return "R16Sint";
+ case Format::eR16Sfloat: return "R16Sfloat";
+ case Format::eR16G16Unorm: return "R16G16Unorm";
+ case Format::eR16G16Snorm: return "R16G16Snorm";
+ case Format::eR16G16Uscaled: return "R16G16Uscaled";
+ case Format::eR16G16Sscaled: return "R16G16Sscaled";
+ case Format::eR16G16Uint: return "R16G16Uint";
+ case Format::eR16G16Sint: return "R16G16Sint";
+ case Format::eR16G16Sfloat: return "R16G16Sfloat";
+ case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
+ case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
+ case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
+ case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
+ case Format::eR16G16B16Uint: return "R16G16B16Uint";
+ case Format::eR16G16B16Sint: return "R16G16B16Sint";
+ case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
+ case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
+ case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
+ case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
+ case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
+ case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
+ case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
+ case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
+ case Format::eR32Uint: return "R32Uint";
+ case Format::eR32Sint: return "R32Sint";
+ case Format::eR32Sfloat: return "R32Sfloat";
+ case Format::eR32G32Uint: return "R32G32Uint";
+ case Format::eR32G32Sint: return "R32G32Sint";
+ case Format::eR32G32Sfloat: return "R32G32Sfloat";
+ case Format::eR32G32B32Uint: return "R32G32B32Uint";
+ case Format::eR32G32B32Sint: return "R32G32B32Sint";
+ case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
+ case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
+ case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
+ case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
+ case Format::eR64Uint: return "R64Uint";
+ case Format::eR64Sint: return "R64Sint";
+ case Format::eR64Sfloat: return "R64Sfloat";
+ case Format::eR64G64Uint: return "R64G64Uint";
+ case Format::eR64G64Sint: return "R64G64Sint";
+ case Format::eR64G64Sfloat: return "R64G64Sfloat";
+ case Format::eR64G64B64Uint: return "R64G64B64Uint";
+ case Format::eR64G64B64Sint: return "R64G64B64Sint";
+ case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
+ case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
+ case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
+ case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
+ case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
+ case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
+ case Format::eD16Unorm: return "D16Unorm";
+ case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
+ case Format::eD32Sfloat: return "D32Sfloat";
+ case Format::eS8Uint: return "S8Uint";
+ case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
+ case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
+ case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
+ case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
+ case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
+ case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
+ case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
+ case Format::eBc2UnormBlock: return "Bc2UnormBlock";
+ case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
+ case Format::eBc3UnormBlock: return "Bc3UnormBlock";
+ case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
+ case Format::eBc4UnormBlock: return "Bc4UnormBlock";
+ case Format::eBc4SnormBlock: return "Bc4SnormBlock";
+ case Format::eBc5UnormBlock: return "Bc5UnormBlock";
+ case Format::eBc5SnormBlock: return "Bc5SnormBlock";
+ case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
+ case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
+ case Format::eBc7UnormBlock: return "Bc7UnormBlock";
+ case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
+ case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
+ case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
+ case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
+ case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
+ case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
+ case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
+ case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
+ case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
+ case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
+ case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
+ case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
+ case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
+ case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
+ case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
+ case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
+ case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
+ case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
+ case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
+ case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
+ case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
+ case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
+ case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
+ case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
+ case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
+ case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
+ case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
+ case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
+ case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
+ case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
+ case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
+ case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
+ case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
+ case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
+ case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
+ case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
+ case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
+ case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
+ case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
+ case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm";
+ case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm";
+ case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm";
+ case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm";
+ case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm";
+ case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm";
+ case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm";
+ case Format::eR10X6UnormPack16: return "R10X6UnormPack16";
+ case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16";
+ case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16";
+ case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
+ case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
+ case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16";
+ case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16";
+ case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16";
+ case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16";
+ case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16";
+ case Format::eR12X4UnormPack16: return "R12X4UnormPack16";
+ case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16";
+ case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16";
+ case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
+ case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
+ case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16";
+ case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16";
+ case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16";
+ case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16";
+ case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16";
+ case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm";
+ case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm";
+ case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm";
+ case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm";
+ case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm";
+ case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm";
+ case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm";
+ case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
+ case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
+ case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
+ case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
+ case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
+ case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
+ case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
+ case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(StructureType value)
+ {
+ switch (value)
+ {
+ case StructureType::eApplicationInfo: return "ApplicationInfo";
+ case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
+ case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
+ case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
+ case StructureType::eSubmitInfo: return "SubmitInfo";
+ case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
+ case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
+ case StructureType::eBindSparseInfo: return "BindSparseInfo";
+ case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
+ case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
+ case StructureType::eEventCreateInfo: return "EventCreateInfo";
+ case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
+ case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
+ case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
+ case StructureType::eImageCreateInfo: return "ImageCreateInfo";
+ case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
+ case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
+ case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
+ case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
+ case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
+ case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
+ case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
+ case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
+ case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
+ case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
+ case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
+ case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
+ case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
+ case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
+ case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
+ case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
+ case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
+ case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
+ case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
+ case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
+ case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
+ case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
+ case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
+ case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
+ case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
+ case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
+ case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
+ case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
+ case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
+ case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
+ case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
+ case StructureType::eMemoryBarrier: return "MemoryBarrier";
+ case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
+ case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
+ case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
+ case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
+ case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
+ case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
+ case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
+ case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
+ case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
+ case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
+ case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
+ case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
+ case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
+ case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
+ case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
+ case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
+ case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
+ case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
+ case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
+ case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
+ case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
+ case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
+ case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
+ case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
+ case StructureType::eFormatProperties2: return "FormatProperties2";
+ case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
+ case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
+ case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
+ case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
+ case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
+ case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
+ case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
+ case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo";
+ case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
+ case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo";
+ case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
+ case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
+ case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
+ case StructureType::ePhysicalDeviceVariablePointerFeatures: return "PhysicalDeviceVariablePointerFeatures";
+ case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
+ case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
+ case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
+ case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
+ case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
+ case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
+ case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
+ case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
+ case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+ case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties";
+ case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
+ case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
+ case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
+ case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
+ case StructureType::eExternalBufferProperties: return "ExternalBufferProperties";
+ case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties";
+ case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo";
+ case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo";
+ case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo";
+ case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo";
+ case StructureType::eExternalFenceProperties: return "ExternalFenceProperties";
+ case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo";
+ case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo";
+ case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo";
+ case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties";
+ case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties";
+ case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport";
+ case StructureType::ePhysicalDeviceShaderDrawParameterFeatures: return "PhysicalDeviceShaderDrawParameterFeatures";
+ case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
+ case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
+ case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
+ case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
+ case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
+ case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
+ case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
+ case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR";
+ case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
+ case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
+ case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
+ case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
+ case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
+ case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
+ case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR";
+ case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
+ case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
+ case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
+ case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
+ case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
+ case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
+ case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
+ case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
+ case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
+ case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
+ case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
+ case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
+ case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
+ case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
+ case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
+ case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
+ case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
+ case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
+ case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
+ case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
+ case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
+ case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
+ case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
+ case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
+ case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
+ case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
+ case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
+ case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
+ case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
+ case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
+ case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
+ case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
+ case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR";
+ case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
+ case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
+ case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
+ case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
+ case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
+ case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
+ case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
+ case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV";
+ case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
+ case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
+ case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
+ case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
+ case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
+ case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
+ case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+ case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+ case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
+ case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT";
+ case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
+ case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
+ case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
+ case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
+ case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
+ case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
+ case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
+ case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
+ case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
+ case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
+ case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
+ case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
+ case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT";
+ case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT";
+ case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT";
+ case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT";
+ case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT";
+ case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID";
+ case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
+ case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID";
+ case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
+ case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID";
+ case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
+ case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT: return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT";
+ case StructureType::eSamplerReductionModeCreateInfoEXT: return "SamplerReductionModeCreateInfoEXT";
+ case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
+ case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
+ case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT";
+ case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
+ case StructureType::eImageFormatListCreateInfoKHR: return "ImageFormatListCreateInfoKHR";
+ case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+ case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+ case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+ case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
+ case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV";
+ case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
+ case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
+ case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT: return "DescriptorSetLayoutBindingFlagsCreateInfoEXT";
+ case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT: return "PhysicalDeviceDescriptorIndexingFeaturesEXT";
+ case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT: return "PhysicalDeviceDescriptorIndexingPropertiesEXT";
+ case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT: return "DescriptorSetVariableDescriptorCountAllocateInfoEXT";
+ case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT: return "DescriptorSetVariableDescriptorCountLayoutSupportEXT";
+ case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT";
+ case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
+ case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
+ case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
+ case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+ case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
+ {
+ switch (value)
+ {
+ case SubpassContents::eInline: return "Inline";
+ case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DynamicState value)
+ {
+ switch (value)
+ {
+ case DynamicState::eViewport: return "Viewport";
+ case DynamicState::eScissor: return "Scissor";
+ case DynamicState::eLineWidth: return "LineWidth";
+ case DynamicState::eDepthBias: return "DepthBias";
+ case DynamicState::eBlendConstants: return "BlendConstants";
+ case DynamicState::eDepthBounds: return "DepthBounds";
+ case DynamicState::eStencilCompareMask: return "StencilCompareMask";
+ case DynamicState::eStencilWriteMask: return "StencilWriteMask";
+ case DynamicState::eStencilReference: return "StencilReference";
+ case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
+ case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
+ case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateType value)
+ {
+ switch (value)
+ {
+ case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
+ case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ObjectType value)
+ {
+ switch (value)
+ {
+ case ObjectType::eUnknown: return "Unknown";
+ case ObjectType::eInstance: return "Instance";
+ case ObjectType::ePhysicalDevice: return "PhysicalDevice";
+ case ObjectType::eDevice: return "Device";
+ case ObjectType::eQueue: return "Queue";
+ case ObjectType::eSemaphore: return "Semaphore";
+ case ObjectType::eCommandBuffer: return "CommandBuffer";
+ case ObjectType::eFence: return "Fence";
+ case ObjectType::eDeviceMemory: return "DeviceMemory";
+ case ObjectType::eBuffer: return "Buffer";
+ case ObjectType::eImage: return "Image";
+ case ObjectType::eEvent: return "Event";
+ case ObjectType::eQueryPool: return "QueryPool";
+ case ObjectType::eBufferView: return "BufferView";
+ case ObjectType::eImageView: return "ImageView";
+ case ObjectType::eShaderModule: return "ShaderModule";
+ case ObjectType::ePipelineCache: return "PipelineCache";
+ case ObjectType::ePipelineLayout: return "PipelineLayout";
+ case ObjectType::eRenderPass: return "RenderPass";
+ case ObjectType::ePipeline: return "Pipeline";
+ case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
+ case ObjectType::eSampler: return "Sampler";
+ case ObjectType::eDescriptorPool: return "DescriptorPool";
+ case ObjectType::eDescriptorSet: return "DescriptorSet";
+ case ObjectType::eFramebuffer: return "Framebuffer";
+ case ObjectType::eCommandPool: return "CommandPool";
+ case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+ case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+ case ObjectType::eSurfaceKHR: return "SurfaceKHR";
+ case ObjectType::eSwapchainKHR: return "SwapchainKHR";
+ case ObjectType::eDisplayKHR: return "DisplayKHR";
+ case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
+ case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+ case ObjectType::eObjectTableNVX: return "ObjectTableNVX";
+ case ObjectType::eIndirectCommandsLayoutNVX: return "IndirectCommandsLayoutNVX";
+ case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT";
+ case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
+ {
+ switch (value)
+ {
+ case QueueFlagBits::eGraphics: return "Graphics";
+ case QueueFlagBits::eCompute: return "Compute";
+ case QueueFlagBits::eTransfer: return "Transfer";
+ case QueueFlagBits::eSparseBinding: return "SparseBinding";
+ case QueueFlagBits::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
+ if (value & QueueFlagBits::eCompute) result += "Compute | ";
+ if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
+ if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
+ if (value & QueueFlagBits::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case DeviceQueueCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DeviceQueueCreateFlagBits::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
+ {
+ switch (value)
+ {
+ case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
+ case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
+ case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
+ case MemoryPropertyFlagBits::eHostCached: return "HostCached";
+ case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
+ case MemoryPropertyFlagBits::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
+ if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
+ if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
+ if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
+ if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
+ if (value & MemoryPropertyFlagBits::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
+ {
+ switch (value)
+ {
+ case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
+ case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
+ if (value & MemoryHeapFlagBits::eMultiInstance) result += "MultiInstance | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
+ {
+ switch (value)
+ {
+ case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
+ case AccessFlagBits::eIndexRead: return "IndexRead";
+ case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
+ case AccessFlagBits::eUniformRead: return "UniformRead";
+ case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
+ case AccessFlagBits::eShaderRead: return "ShaderRead";
+ case AccessFlagBits::eShaderWrite: return "ShaderWrite";
+ case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
+ case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
+ case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+ case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+ case AccessFlagBits::eTransferRead: return "TransferRead";
+ case AccessFlagBits::eTransferWrite: return "TransferWrite";
+ case AccessFlagBits::eHostRead: return "HostRead";
+ case AccessFlagBits::eHostWrite: return "HostWrite";
+ case AccessFlagBits::eMemoryRead: return "MemoryRead";
+ case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
+ case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
+ case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
+ case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
+ if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
+ if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
+ if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
+ if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
+ if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
+ if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
+ if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
+ if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
+ if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
+ if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
+ if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
+ if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
+ if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
+ if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
+ if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
+ if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
+ if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
+ if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
+ if (value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT) result += "ColorAttachmentReadNoncoherentEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
+ {
+ switch (value)
+ {
+ case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
+ case BufferUsageFlagBits::eTransferDst: return "TransferDst";
+ case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
+ case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
+ case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
+ case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
+ case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
+ if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
+ if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
+ if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
+ if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
+ if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
+ if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
+ if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
+ if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
+ case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
+ case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
+ case BufferCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
+ if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
+ if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
+ if (value & BufferCreateFlagBits::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
+ {
+ switch (value)
+ {
+ case ShaderStageFlagBits::eVertex: return "Vertex";
+ case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
+ case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
+ case ShaderStageFlagBits::eGeometry: return "Geometry";
+ case ShaderStageFlagBits::eFragment: return "Fragment";
+ case ShaderStageFlagBits::eCompute: return "Compute";
+ case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
+ case ShaderStageFlagBits::eAll: return "All";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
+ if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
+ if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
+ if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
+ if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
+ if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
+ if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
+ if (value & ShaderStageFlagBits::eAll) result += "All | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
+ {
+ switch (value)
+ {
+ case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
+ case ImageUsageFlagBits::eTransferDst: return "TransferDst";
+ case ImageUsageFlagBits::eSampled: return "Sampled";
+ case ImageUsageFlagBits::eStorage: return "Storage";
+ case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
+ case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+ case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
+ case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
+ if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
+ if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
+ if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
+ if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
+ if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
+ if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
+ if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
+ case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
+ case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
+ case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
+ case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
+ case ImageCreateFlagBits::eAlias: return "Alias";
+ case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+ case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
+ case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
+ case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
+ case ImageCreateFlagBits::eProtected: return "Protected";
+ case ImageCreateFlagBits::eDisjoint: return "Disjoint";
+ case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
+ if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
+ if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
+ if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
+ if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
+ if (value & ImageCreateFlagBits::eAlias) result += "Alias | ";
+ if (value & ImageCreateFlagBits::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | ";
+ if (value & ImageCreateFlagBits::e2DArrayCompatible) result += "2DArrayCompatible | ";
+ if (value & ImageCreateFlagBits::eBlockTexelViewCompatible) result += "BlockTexelViewCompatible | ";
+ if (value & ImageCreateFlagBits::eExtendedUsage) result += "ExtendedUsage | ";
+ if (value & ImageCreateFlagBits::eProtected) result += "Protected | ";
+ if (value & ImageCreateFlagBits::eDisjoint) result += "Disjoint | ";
+ if (value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) result += "SampleLocationsCompatibleDepthEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
+ case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
+ case PipelineCreateFlagBits::eDerivative: return "Derivative";
+ case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
+ case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
+ if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
+ if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
+ if (value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex) result += "ViewIndexFromDeviceIndex | ";
+ if (value & PipelineCreateFlagBits::eDispatchBase) result += "DispatchBase | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
+ {
+ switch (value)
+ {
+ case ColorComponentFlagBits::eR: return "R";
+ case ColorComponentFlagBits::eG: return "G";
+ case ColorComponentFlagBits::eB: return "B";
+ case ColorComponentFlagBits::eA: return "A";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ColorComponentFlagBits::eR) result += "R | ";
+ if (value & ColorComponentFlagBits::eG) result += "G | ";
+ if (value & ColorComponentFlagBits::eB) result += "B | ";
+ if (value & ColorComponentFlagBits::eA) result += "A | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case FenceCreateFlagBits::eSignaled: return "Signaled";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
+ case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
+ case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
+ case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+ case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
+ case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
+ case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
+ case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+ case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
+ case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
+ case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+ case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
+ case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
+ case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+ case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
+ case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
+ case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT: return "SampledImageFilterMinmaxEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
+ if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
+ if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
+ if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
+ if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
+ if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
+ if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
+ if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
+ if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
+ if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
+ if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
+ if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
+ if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
+ if (value & FormatFeatureFlagBits::eTransferSrc) result += "TransferSrc | ";
+ if (value & FormatFeatureFlagBits::eTransferDst) result += "TransferDst | ";
+ if (value & FormatFeatureFlagBits::eMidpointChromaSamples) result += "MidpointChromaSamples | ";
+ if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) result += "SampledImageYcbcrConversionLinearFilter | ";
+ if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+ if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+ if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+ if (value & FormatFeatureFlagBits::eDisjoint) result += "Disjoint | ";
+ if (value & FormatFeatureFlagBits::eCositedChromaSamples) result += "CositedChromaSamples | ";
+ if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
+ if (value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) result += "SampledImageFilterMinmaxEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
+ {
+ switch (value)
+ {
+ case QueryControlFlagBits::ePrecise: return "Precise";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
+ {
+ switch (value)
+ {
+ case QueryResultFlagBits::e64: return "64";
+ case QueryResultFlagBits::eWait: return "Wait";
+ case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
+ case QueryResultFlagBits::ePartial: return "Partial";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & QueryResultFlagBits::e64) result += "64 | ";
+ if (value & QueryResultFlagBits::eWait) result += "Wait | ";
+ if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
+ if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
+ {
+ switch (value)
+ {
+ case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
+ case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
+ case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
+ if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
+ if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
+ {
+ switch (value)
+ {
+ case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
+ case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
+ case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
+ case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
+ case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
+ case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
+ case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
+ if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
+ if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
+ if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
+ if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
+ if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
+ if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
+ if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
+ if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
+ if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
+ if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
+ {
+ switch (value)
+ {
+ case ImageAspectFlagBits::eColor: return "Color";
+ case ImageAspectFlagBits::eDepth: return "Depth";
+ case ImageAspectFlagBits::eStencil: return "Stencil";
+ case ImageAspectFlagBits::eMetadata: return "Metadata";
+ case ImageAspectFlagBits::ePlane0: return "Plane0";
+ case ImageAspectFlagBits::ePlane1: return "Plane1";
+ case ImageAspectFlagBits::ePlane2: return "Plane2";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ImageAspectFlagBits::eColor) result += "Color | ";
+ if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
+ if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
+ if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
+ if (value & ImageAspectFlagBits::ePlane0) result += "Plane0 | ";
+ if (value & ImageAspectFlagBits::ePlane1) result += "Plane1 | ";
+ if (value & ImageAspectFlagBits::ePlane2) result += "Plane2 | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
+ {
+ switch (value)
+ {
+ case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
+ case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
+ case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
+ if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
+ if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
+ {
+ switch (value)
+ {
+ case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
+ {
+ switch (value)
+ {
+ case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
+ case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
+ case PipelineStageFlagBits::eVertexInput: return "VertexInput";
+ case PipelineStageFlagBits::eVertexShader: return "VertexShader";
+ case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
+ case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+ case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
+ case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
+ case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
+ case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
+ case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
+ case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
+ case PipelineStageFlagBits::eTransfer: return "Transfer";
+ case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
+ case PipelineStageFlagBits::eHost: return "Host";
+ case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
+ case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+ case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
+ if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
+ if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
+ if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
+ if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
+ if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
+ if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
+ if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
+ if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
+ if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
+ if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
+ if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
+ if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
+ if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
+ if (value & PipelineStageFlagBits::eHost) result += "Host | ";
+ if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
+ if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
+ if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case CommandPoolCreateFlagBits::eTransient: return "Transient";
+ case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
+ case CommandPoolCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
+ if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
+ if (value & CommandPoolCreateFlagBits::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
+ {
+ switch (value)
+ {
+ case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
+ {
+ switch (value)
+ {
+ case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
+ {
+ switch (value)
+ {
+ case SampleCountFlagBits::e1: return "1";
+ case SampleCountFlagBits::e2: return "2";
+ case SampleCountFlagBits::e4: return "4";
+ case SampleCountFlagBits::e8: return "8";
+ case SampleCountFlagBits::e16: return "16";
+ case SampleCountFlagBits::e32: return "32";
+ case SampleCountFlagBits::e64: return "64";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SampleCountFlagBits::e1) result += "1 | ";
+ if (value & SampleCountFlagBits::e2) result += "2 | ";
+ if (value & SampleCountFlagBits::e4) result += "4 | ";
+ if (value & SampleCountFlagBits::e8) result += "8 | ";
+ if (value & SampleCountFlagBits::e16) result += "16 | ";
+ if (value & SampleCountFlagBits::e32) result += "32 | ";
+ if (value & SampleCountFlagBits::e64) result += "64 | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
+ {
+ switch (value)
+ {
+ case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
+ {
+ switch (value)
+ {
+ case StencilFaceFlagBits::eFront: return "Front";
+ case StencilFaceFlagBits::eBack: return "Back";
+ case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & StencilFaceFlagBits::eFront) result += "Front | ";
+ if (value & StencilFaceFlagBits::eBack) result += "Back | ";
+ if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
+ case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT: return "UpdateAfterBindEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
+ if (value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) result += "UpdateAfterBindEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
+ {
+ switch (value)
+ {
+ case DependencyFlagBits::eByRegion: return "ByRegion";
+ case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
+ case DependencyFlagBits::eViewLocal: return "ViewLocal";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
+ if (value & DependencyFlagBits::eDeviceGroup) result += "DeviceGroup | ";
+ if (value & DependencyFlagBits::eViewLocal) result += "ViewLocal | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
+ {
+ switch (value)
+ {
+ case PresentModeKHR::eImmediate: return "Immediate";
+ case PresentModeKHR::eMailbox: return "Mailbox";
+ case PresentModeKHR::eFifo: return "Fifo";
+ case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
+ case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
+ case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
+ {
+ switch (value)
+ {
+ case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
+ case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
+ case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
+ case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT";
+ case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
+ case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
+ case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
+ case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
+ case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
+ case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
+ case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
+ case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
+ case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
+ case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
+ case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
+ {
+ switch (value)
+ {
+ case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
+ case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
+ if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
+ if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
+ if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
+ {
+ switch (value)
+ {
+ case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
+ case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
+ case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
+ case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
+ if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
+ if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
+ if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
+ {
+ switch (value)
+ {
+ case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
+ case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
+ case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
+ case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
+ case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
+ if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
+ if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
+ if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
+ {
+ switch (value)
+ {
+ case DebugReportFlagBitsEXT::eInformation: return "Information";
+ case DebugReportFlagBitsEXT::eWarning: return "Warning";
+ case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
+ case DebugReportFlagBitsEXT::eError: return "Error";
+ case DebugReportFlagBitsEXT::eDebug: return "Debug";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
+ if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
+ if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
+ if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
+ if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
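+ // Illustrative usage (a sketch, assuming the caller includes <iostream>):
+ // these overloads are handy when logging from a debug report callback, e.g.
+ //   std::cerr << vk::to_string(vk::DebugReportFlagBitsEXT::eError) << ": "
+ //             << vk::to_string(vk::DebugReportObjectTypeEXT::eBuffer) << '\n';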
+
+ VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
+ {
+ switch (value)
+ {
+ case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
+ case DebugReportObjectTypeEXT::eInstance: return "Instance";
+ case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
+ case DebugReportObjectTypeEXT::eDevice: return "Device";
+ case DebugReportObjectTypeEXT::eQueue: return "Queue";
+ case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
+ case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
+ case DebugReportObjectTypeEXT::eFence: return "Fence";
+ case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
+ case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
+ case DebugReportObjectTypeEXT::eImage: return "Image";
+ case DebugReportObjectTypeEXT::eEvent: return "Event";
+ case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
+ case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
+ case DebugReportObjectTypeEXT::eImageView: return "ImageView";
+ case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
+ case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
+ case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
+ case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
+ case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
+ case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
+ case DebugReportObjectTypeEXT::eSampler: return "Sampler";
+ case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
+ case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
+ case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
+ case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
+ case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
+ case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
+ case DebugReportObjectTypeEXT::eDebugReportCallbackExt: return "DebugReportCallbackExt";
+ case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
+ case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
+ case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
+ case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
+ case DebugReportObjectTypeEXT::eValidationCacheExt: return "ValidationCacheExt";
+ case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+ case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
+ {
+ switch (value)
+ {
+ case RasterizationOrderAMD::eStrict: return "Strict";
+ case RasterizationOrderAMD::eRelaxed: return "Relaxed";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
+ {
+ switch (value)
+ {
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
+ if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+ if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
+ if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
+ {
+ switch (value)
+ {
+ case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
+ case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
+ if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
+ if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
+ {
+ switch (value)
+ {
+ case ValidationCheckEXT::eAll: return "All";
+ case ValidationCheckEXT::eShaders: return "Shaders";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case SubgroupFeatureFlagBits::eBasic: return "Basic";
+ case SubgroupFeatureFlagBits::eVote: return "Vote";
+ case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
+ case SubgroupFeatureFlagBits::eBallot: return "Ballot";
+ case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
+ case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
+ case SubgroupFeatureFlagBits::eClustered: return "Clustered";
+ case SubgroupFeatureFlagBits::eQuad: return "Quad";
+ case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SubgroupFeatureFlagBits::eBasic) result += "Basic | ";
+ if (value & SubgroupFeatureFlagBits::eVote) result += "Vote | ";
+ if (value & SubgroupFeatureFlagBits::eArithmetic) result += "Arithmetic | ";
+ if (value & SubgroupFeatureFlagBits::eBallot) result += "Ballot | ";
+ if (value & SubgroupFeatureFlagBits::eShuffle) result += "Shuffle | ";
+ if (value & SubgroupFeatureFlagBits::eShuffleRelative) result += "ShuffleRelative | ";
+ if (value & SubgroupFeatureFlagBits::eClustered) result += "Clustered | ";
+ if (value & SubgroupFeatureFlagBits::eQuad) result += "Quad | ";
+ if (value & SubgroupFeatureFlagBits::ePartitionedNV) result += "PartitionedNV | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
+ {
+ switch (value)
+ {
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
+ if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
+ {
+ switch (value)
+ {
+ case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
+ case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
+ if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
+ {
+ switch (value)
+ {
+ case IndirectCommandsTokenTypeNVX::ePipeline: return "Pipeline";
+ case IndirectCommandsTokenTypeNVX::eDescriptorSet: return "DescriptorSet";
+ case IndirectCommandsTokenTypeNVX::eIndexBuffer: return "IndexBuffer";
+ case IndirectCommandsTokenTypeNVX::eVertexBuffer: return "VertexBuffer";
+ case IndirectCommandsTokenTypeNVX::ePushConstant: return "PushConstant";
+ case IndirectCommandsTokenTypeNVX::eDrawIndexed: return "DrawIndexed";
+ case IndirectCommandsTokenTypeNVX::eDraw: return "Draw";
+ case IndirectCommandsTokenTypeNVX::eDispatch: return "Dispatch";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
+ {
+ switch (value)
+ {
+ case ObjectEntryTypeNVX::eDescriptorSet: return "DescriptorSet";
+ case ObjectEntryTypeNVX::ePipeline: return "Pipeline";
+ case ObjectEntryTypeNVX::eIndexBuffer: return "IndexBuffer";
+ case ObjectEntryTypeNVX::eVertexBuffer: return "VertexBuffer";
+ case ObjectEntryTypeNVX::ePushConstant: return "PushConstant";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits value)
+ {
+ switch (value)
+ {
+ case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
+ case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT: return "UpdateAfterBindPoolEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) result += "PushDescriptorKHR | ";
+ if (value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) result += "UpdateAfterBindPoolEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
+ case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
+ case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
+ case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
+ case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture) result += "D3D11Texture | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) result += "D3D11TextureKmt | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap) result += "D3D12Heap | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource) result += "D3D12Resource | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) result += "DmaBufEXT | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) result += "AndroidHardwareBufferANDROID | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) result += "HostAllocationEXT | ";
+ if (value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) result += "HostMappedForeignMemoryEXT | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalMemoryFeatureFlagBits::eDedicatedOnly) result += "DedicatedOnly | ";
+ if (value & ExternalMemoryFeatureFlagBits::eExportable) result += "Exportable | ";
+ if (value & ExternalMemoryFeatureFlagBits::eImportable) result += "Importable | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
+ case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
+ if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
+ if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+ if (value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) result += "D3D12Fence | ";
+ if (value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd) result += "SyncFd | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalSemaphoreFeatureFlagBits::eExportable) result += "Exportable | ";
+ if (value & ExternalSemaphoreFeatureFlagBits::eImportable) result += "Importable | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagBits value)
+ {
+ switch (value)
+ {
+ case SemaphoreImportFlagBits::eTemporary: return "Temporary";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SemaphoreImportFlagBits::eTemporary) result += "Temporary | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalFenceHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
+ if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
+ if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+ if (value & ExternalFenceHandleTypeFlagBits::eSyncFd) result += "SyncFd | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & ExternalFenceFeatureFlagBits::eExportable) result += "Exportable | ";
+ if (value & ExternalFenceFeatureFlagBits::eImportable) result += "Importable | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FenceImportFlagBits value)
+ {
+ switch (value)
+ {
+ case FenceImportFlagBits::eTemporary: return "Temporary";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(FenceImportFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & FenceImportFlagBits::eTemporary) result += "Temporary | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value)
+ {
+ switch (value)
+ {
+ case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SurfaceCounterFlagBitsEXT::eVblank) result += "Vblank | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value)
+ {
+ switch (value)
+ {
+ case DisplayPowerStateEXT::eOff: return "Off";
+ case DisplayPowerStateEXT::eSuspend: return "Suspend";
+ case DisplayPowerStateEXT::eOn: return "On";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value)
+ {
+ switch (value)
+ {
+ case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value)
+ {
+ switch (value)
+ {
+ case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagBits value)
+ {
+ switch (value)
+ {
+ case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc";
+ case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst";
+ case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc";
+ case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & PeerMemoryFeatureFlagBits::eCopySrc) result += "CopySrc | ";
+ if (value & PeerMemoryFeatureFlagBits::eCopyDst) result += "CopyDst | ";
+ if (value & PeerMemoryFeatureFlagBits::eGenericSrc) result += "GenericSrc | ";
+ if (value & PeerMemoryFeatureFlagBits::eGenericDst) result += "GenericDst | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagBits value)
+ {
+ switch (value)
+ {
+ case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & MemoryAllocateFlagBits::eDeviceMask) result += "DeviceMask | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagBitsKHR value)
+ {
+ switch (value)
+ {
+ case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
+ case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
+ case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
+ case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagsKHR value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DeviceGroupPresentModeFlagBitsKHR::eLocal) result += "Local | ";
+ if (value & DeviceGroupPresentModeFlagBitsKHR::eRemote) result += "Remote | ";
+ if (value & DeviceGroupPresentModeFlagBitsKHR::eSum) result += "Sum | ";
+ if (value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) result += "LocalMultiDevice | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR value)
+ {
+ switch (value)
+ {
+ case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+ case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | ";
+ if (value & SwapchainCreateFlagBitsKHR::eProtected) result += "Protected | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ViewportCoordinateSwizzleNV value)
+ {
+ switch (value)
+ {
+ case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
+ case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
+ case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
+ case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
+ case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
+ case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
+ case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
+ case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DiscardRectangleModeEXT value)
+ {
+ switch (value)
+ {
+ case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
+ case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits value)
+ {
+ switch (value)
+ {
+ case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
+ case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & SubpassDescriptionFlagBits::ePerViewAttributesNVX) result += "PerViewAttributesNVX | ";
+ if (value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) result += "PerViewPositionXOnlyNVX | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(PointClippingBehavior value)
+ {
+ switch (value)
+ {
+ case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes";
+ case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerReductionModeEXT value)
+ {
+ switch (value)
+ {
+ case SamplerReductionModeEXT::eWeightedAverage: return "WeightedAverage";
+ case SamplerReductionModeEXT::eMin: return "Min";
+ case SamplerReductionModeEXT::eMax: return "Max";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(TessellationDomainOrigin value)
+ {
+ switch (value)
+ {
+ case TessellationDomainOrigin::eUpperLeft: return "UpperLeft";
+ case TessellationDomainOrigin::eLowerLeft: return "LowerLeft";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrModelConversion value)
+ {
+ switch (value)
+ {
+ case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity";
+ case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity";
+ case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709";
+ case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601";
+ case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrRange value)
+ {
+ switch (value)
+ {
+ case SamplerYcbcrRange::eItuFull: return "ItuFull";
+ case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ChromaLocation value)
+ {
+ switch (value)
+ {
+ case ChromaLocation::eCositedEven: return "CositedEven";
+ case ChromaLocation::eMidpoint: return "Midpoint";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(BlendOverlapEXT value)
+ {
+ switch (value)
+ {
+ case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
+ case BlendOverlapEXT::eDisjoint: return "Disjoint";
+ case BlendOverlapEXT::eConjoint: return "Conjoint";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(CoverageModulationModeNV value)
+ {
+ switch (value)
+ {
+ case CoverageModulationModeNV::eNone: return "None";
+ case CoverageModulationModeNV::eRgb: return "Rgb";
+ case CoverageModulationModeNV::eAlpha: return "Alpha";
+ case CoverageModulationModeNV::eRgba: return "Rgba";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ValidationCacheHeaderVersionEXT value)
+ {
+ switch (value)
+ {
+ case ValidationCacheHeaderVersionEXT::eOne: return "One";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ShaderInfoTypeAMD value)
+ {
+ switch (value)
+ {
+ case ShaderInfoTypeAMD::eStatistics: return "Statistics";
+ case ShaderInfoTypeAMD::eBinary: return "Binary";
+ case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(QueueGlobalPriorityEXT value)
+ {
+ switch (value)
+ {
+ case QueueGlobalPriorityEXT::eLow: return "Low";
+ case QueueGlobalPriorityEXT::eMedium: return "Medium";
+ case QueueGlobalPriorityEXT::eHigh: return "High";
+ case QueueGlobalPriorityEXT::eRealtime: return "Realtime";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagBitsEXT value)
+ {
+ switch (value)
+ {
+ case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagsEXT value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) result += "Verbose | ";
+ if (value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo) result += "Info | ";
+ if (value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning) result += "Warning | ";
+ if (value & DebugUtilsMessageSeverityFlagBitsEXT::eError) result += "Error | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagBitsEXT value)
+ {
+ switch (value)
+ {
+ case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
+ case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
+ case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagsEXT value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral) result += "General | ";
+ if (value & DebugUtilsMessageTypeFlagBitsEXT::eValidation) result += "Validation | ";
+ if (value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance) result += "Performance | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(ConservativeRasterizationModeEXT value)
+ {
+ switch (value)
+ {
+ case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
+ case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
+ case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagBitsEXT value)
+ {
+ switch (value)
+ {
+ case DescriptorBindingFlagBitsEXT::eUpdateAfterBind: return "UpdateAfterBind";
+ case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
+ case DescriptorBindingFlagBitsEXT::ePartiallyBound: return "PartiallyBound";
+ case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount: return "VariableDescriptorCount";
+ default: return "invalid";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagsEXT value)
+ {
+ if (!value) return "{}";
+ std::string result;
+ if (value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind) result += "UpdateAfterBind | ";
+ if (value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) result += "UpdateUnusedWhilePending | ";
+ if (value & DescriptorBindingFlagBitsEXT::ePartiallyBound) result += "PartiallyBound | ";
+ if (value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) result += "VariableDescriptorCount | ";
+ return "{" + result.substr(0, result.size() - 3) + "}";
+ }
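+
+  // Illustrative note (not part of the generated interface): the to_string
+  // overloads above are meant for logging and debugging. For a *FlagBits value
+  // they return the bare enumerant name; for a combined *Flags mask they return
+  // the set bits joined with " | " inside braces. A minimal sketch, assuming
+  // <iostream> is available, the vk namespace is used, and the usual generated
+  // operator| for the flag bits is present in this header:
+  //
+  //   vk::DescriptorBindingFlagsEXT bindingFlags =
+  //     vk::DescriptorBindingFlagBitsEXT::eUpdateAfterBind | vk::DescriptorBindingFlagBitsEXT::ePartiallyBound;
+  //   std::cout << vk::to_string(bindingFlags) << std::endl;                               // "{UpdateAfterBind | PartiallyBound}"
+  //   std::cout << vk::to_string(vk::PointClippingBehavior::eAllClipPlanes) << std::endl;  // "AllClipPlanes"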
+
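+  // DispatchLoaderDynamic is a table of Vulkan entry points that is filled at
+  // runtime through vkGetInstanceProcAddr / vkGetDeviceProcAddr instead of
+  // relying on the symbols exported by the Vulkan loader library. Every pointer
+  // defaults to 0 until init() is called with a valid Instance (and, optionally,
+  // a Device).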
+ class DispatchLoaderDynamic
+ {
+ public:
+ PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+ PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+ PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+ PFN_vkAllocateMemory vkAllocateMemory = 0;
+ PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+ PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+ PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+ PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+ PFN_vkBindImageMemory vkBindImageMemory = 0;
+ PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+ PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+ PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+ PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+ PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+ PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+ PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+ PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+ PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+ PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+ PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+ PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+ PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+ PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+ PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+ PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+ PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+ PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+ PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+ PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+ PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+ PFN_vkCmdDispatch vkCmdDispatch = 0;
+ PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+ PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+ PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+ PFN_vkCmdDraw vkCmdDraw = 0;
+ PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+ PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+ PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
+ PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+ PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
+ PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+ PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+ PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+ PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+ PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+ PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+ PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+ PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+ PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0;
+ PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+ PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+ PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
+ PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0;
+ PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+ PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+ PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+ PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+ PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+ PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+ PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+ PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+ PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+ PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+ PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+ PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+ PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+ PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+ PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+ PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+ PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+ PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+ PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+ PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+ PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+ PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ PFN_vkCreateBuffer vkCreateBuffer = 0;
+ PFN_vkCreateBufferView vkCreateBufferView = 0;
+ PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+ PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+ PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+ PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+ PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+ PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+ PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+ PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+ PFN_vkCreateDevice vkCreateDevice = 0;
+ PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+ PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+ PFN_vkCreateEvent vkCreateEvent = 0;
+ PFN_vkCreateFence vkCreateFence = 0;
+ PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+ PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+ PFN_vkCreateImage vkCreateImage = 0;
+ PFN_vkCreateImageView vkCreateImageView = 0;
+ PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0;
+ PFN_vkCreateInstance vkCreateInstance = 0;
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ PFN_vkCreateMirSurfaceKHR vkCreateMirSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0;
+ PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+ PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+ PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+ PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+ PFN_vkCreateSampler vkCreateSampler = 0;
+ PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+ PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+ PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+ PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+ PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+ PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+ PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+#ifdef VK_USE_PLATFORM_VI_NN
+ PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+ PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+ PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+ PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+ PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+ PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+ PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+ PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+ PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+ PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+ PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+ PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+ PFN_vkDestroyDevice vkDestroyDevice = 0;
+ PFN_vkDestroyEvent vkDestroyEvent = 0;
+ PFN_vkDestroyFence vkDestroyFence = 0;
+ PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+ PFN_vkDestroyImage vkDestroyImage = 0;
+ PFN_vkDestroyImageView vkDestroyImageView = 0;
+ PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0;
+ PFN_vkDestroyInstance vkDestroyInstance = 0;
+ PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0;
+ PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+ PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+ PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+ PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+ PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+ PFN_vkDestroySampler vkDestroySampler = 0;
+ PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+ PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+ PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+ PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+ PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+ PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+ PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+ PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+ PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+ PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+ PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+ PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+ PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+ PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+ PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+ PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+ PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+ PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+ PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+ PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+ PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+ PFN_vkFreeMemory vkFreeMemory = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+ PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+ PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+ PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+ PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+ PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+ PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+ PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+ PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+ PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+ PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+ PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+ PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+ PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+ PFN_vkGetEventStatus vkGetEventStatus = 0;
+ PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+ PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+ PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+ PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+ PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+ PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+ PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+ PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+ PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+ PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+ PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+ PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+ PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+ PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+ PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+ PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+ PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+ PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+ PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+ PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0;
+ PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+ PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+ PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+ PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+ PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ PFN_vkGetPhysicalDeviceMirPresentationSupportKHR vkGetPhysicalDeviceMirPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+ PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+ PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+ PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+ PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+ PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+ PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+ PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+ PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+ PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+ PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+ PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+ PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+ PFN_vkMapMemory vkMapMemory = 0;
+ PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+ PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+ PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+ PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+ PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+ PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+ PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+ PFN_vkQueueSubmit vkQueueSubmit = 0;
+ PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+ PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+ PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+ PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0;
+ PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+ PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+ PFN_vkResetCommandPool vkResetCommandPool = 0;
+ PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+ PFN_vkResetEvent vkResetEvent = 0;
+ PFN_vkResetFences vkResetFences = 0;
+ PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+ PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+ PFN_vkSetEvent vkSetEvent = 0;
+ PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+ PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+ PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+ PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+ PFN_vkUnmapMemory vkUnmapMemory = 0;
+ PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0;
+ PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+ PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+ PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+ PFN_vkWaitForFences vkWaitForFences = 0;
+ public:
+ DispatchLoaderDynamic(Instance instance = Instance(), Device device = Device())
+ {
+ if (instance)
+ {
+ init(instance, device);
+ }
+ }
+
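+    // init() resolves every entry point by name: when a non-null Device is
+    // supplied, the functions are fetched through Device::getProcAddr, otherwise
+    // they fall back to Instance::getProcAddr; instance-level functions (surface
+    // creation, debug callbacks, ...) always go through the Instance. A minimal
+    // usage sketch, assuming a previously created vk::Instance 'instance' and
+    // vk::Device 'device', and that the handle member functions in this header
+    // accept a dispatch object as their last argument:
+    //
+    //   vk::DispatchLoaderDynamic dld(instance, device);
+    //   device.waitIdle(dld);   // dispatches through the dynamically loaded vkDeviceWaitIdle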
+ void init(Instance instance, Device device = Device())
+ {
+ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR(device ? device.getProcAddr( "vkAcquireNextImage2KHR") : instance.getProcAddr( "vkAcquireNextImage2KHR"));
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR(device ? device.getProcAddr( "vkAcquireNextImageKHR") : instance.getProcAddr( "vkAcquireNextImageKHR"));
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT(device ? device.getProcAddr( "vkAcquireXlibDisplayEXT") : instance.getProcAddr( "vkAcquireXlibDisplayEXT"));
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers(device ? device.getProcAddr( "vkAllocateCommandBuffers") : instance.getProcAddr( "vkAllocateCommandBuffers"));
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets(device ? device.getProcAddr( "vkAllocateDescriptorSets") : instance.getProcAddr( "vkAllocateDescriptorSets"));
+ vkAllocateMemory = PFN_vkAllocateMemory(device ? device.getProcAddr( "vkAllocateMemory") : instance.getProcAddr( "vkAllocateMemory"));
+ vkBeginCommandBuffer = PFN_vkBeginCommandBuffer(device ? device.getProcAddr( "vkBeginCommandBuffer") : instance.getProcAddr( "vkBeginCommandBuffer"));
+ vkBindBufferMemory = PFN_vkBindBufferMemory(device ? device.getProcAddr( "vkBindBufferMemory") : instance.getProcAddr( "vkBindBufferMemory"));
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2(device ? device.getProcAddr( "vkBindBufferMemory2") : instance.getProcAddr( "vkBindBufferMemory2"));
+ vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR(device ? device.getProcAddr( "vkBindBufferMemory2KHR") : instance.getProcAddr( "vkBindBufferMemory2KHR"));
+ vkBindImageMemory = PFN_vkBindImageMemory(device ? device.getProcAddr( "vkBindImageMemory") : instance.getProcAddr( "vkBindImageMemory"));
+ vkBindImageMemory2 = PFN_vkBindImageMemory2(device ? device.getProcAddr( "vkBindImageMemory2") : instance.getProcAddr( "vkBindImageMemory2"));
+ vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR(device ? device.getProcAddr( "vkBindImageMemory2KHR") : instance.getProcAddr( "vkBindImageMemory2KHR"));
+ vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT"));
+ vkCmdBeginQuery = PFN_vkCmdBeginQuery(device ? device.getProcAddr( "vkCmdBeginQuery") : instance.getProcAddr( "vkCmdBeginQuery"));
+ vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass(device ? device.getProcAddr( "vkCmdBeginRenderPass") : instance.getProcAddr( "vkCmdBeginRenderPass"));
+ vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets(device ? device.getProcAddr( "vkCmdBindDescriptorSets") : instance.getProcAddr( "vkCmdBindDescriptorSets"));
+ vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer(device ? device.getProcAddr( "vkCmdBindIndexBuffer") : instance.getProcAddr( "vkCmdBindIndexBuffer"));
+ vkCmdBindPipeline = PFN_vkCmdBindPipeline(device ? device.getProcAddr( "vkCmdBindPipeline") : instance.getProcAddr( "vkCmdBindPipeline"));
+ vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers(device ? device.getProcAddr( "vkCmdBindVertexBuffers") : instance.getProcAddr( "vkCmdBindVertexBuffers"));
+ vkCmdBlitImage = PFN_vkCmdBlitImage(device ? device.getProcAddr( "vkCmdBlitImage") : instance.getProcAddr( "vkCmdBlitImage"));
+ vkCmdClearAttachments = PFN_vkCmdClearAttachments(device ? device.getProcAddr( "vkCmdClearAttachments") : instance.getProcAddr( "vkCmdClearAttachments"));
+ vkCmdClearColorImage = PFN_vkCmdClearColorImage(device ? device.getProcAddr( "vkCmdClearColorImage") : instance.getProcAddr( "vkCmdClearColorImage"));
+ vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage(device ? device.getProcAddr( "vkCmdClearDepthStencilImage") : instance.getProcAddr( "vkCmdClearDepthStencilImage"));
+ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer(device ? device.getProcAddr( "vkCmdCopyBuffer") : instance.getProcAddr( "vkCmdCopyBuffer"));
+ vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage(device ? device.getProcAddr( "vkCmdCopyBufferToImage") : instance.getProcAddr( "vkCmdCopyBufferToImage"));
+ vkCmdCopyImage = PFN_vkCmdCopyImage(device ? device.getProcAddr( "vkCmdCopyImage") : instance.getProcAddr( "vkCmdCopyImage"));
+ vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer(device ? device.getProcAddr( "vkCmdCopyImageToBuffer") : instance.getProcAddr( "vkCmdCopyImageToBuffer"));
+ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults(device ? device.getProcAddr( "vkCmdCopyQueryPoolResults") : instance.getProcAddr( "vkCmdCopyQueryPoolResults"));
+ vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT(device ? device.getProcAddr( "vkCmdDebugMarkerBeginEXT") : instance.getProcAddr( "vkCmdDebugMarkerBeginEXT"));
+ vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT(device ? device.getProcAddr( "vkCmdDebugMarkerEndEXT") : instance.getProcAddr( "vkCmdDebugMarkerEndEXT"));
+ vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT(device ? device.getProcAddr( "vkCmdDebugMarkerInsertEXT") : instance.getProcAddr( "vkCmdDebugMarkerInsertEXT"));
+ vkCmdDispatch = PFN_vkCmdDispatch(device ? device.getProcAddr( "vkCmdDispatch") : instance.getProcAddr( "vkCmdDispatch"));
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase(device ? device.getProcAddr( "vkCmdDispatchBase") : instance.getProcAddr( "vkCmdDispatchBase"));
+ vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR(device ? device.getProcAddr( "vkCmdDispatchBaseKHR") : instance.getProcAddr( "vkCmdDispatchBaseKHR"));
+ vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect(device ? device.getProcAddr( "vkCmdDispatchIndirect") : instance.getProcAddr( "vkCmdDispatchIndirect"));
+ vkCmdDraw = PFN_vkCmdDraw(device ? device.getProcAddr( "vkCmdDraw") : instance.getProcAddr( "vkCmdDraw"));
+ vkCmdDrawIndexed = PFN_vkCmdDrawIndexed(device ? device.getProcAddr( "vkCmdDrawIndexed") : instance.getProcAddr( "vkCmdDrawIndexed"));
+ vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect(device ? device.getProcAddr( "vkCmdDrawIndexedIndirect") : instance.getProcAddr( "vkCmdDrawIndexedIndirect"));
+ vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD"));
+ vkCmdDrawIndirect = PFN_vkCmdDrawIndirect(device ? device.getProcAddr( "vkCmdDrawIndirect") : instance.getProcAddr( "vkCmdDrawIndirect"));
+ vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndirectCountAMD"));
+ vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdEndDebugUtilsLabelEXT"));
+ vkCmdEndQuery = PFN_vkCmdEndQuery(device ? device.getProcAddr( "vkCmdEndQuery") : instance.getProcAddr( "vkCmdEndQuery"));
+ vkCmdEndRenderPass = PFN_vkCmdEndRenderPass(device ? device.getProcAddr( "vkCmdEndRenderPass") : instance.getProcAddr( "vkCmdEndRenderPass"));
+ vkCmdExecuteCommands = PFN_vkCmdExecuteCommands(device ? device.getProcAddr( "vkCmdExecuteCommands") : instance.getProcAddr( "vkCmdExecuteCommands"));
+ vkCmdFillBuffer = PFN_vkCmdFillBuffer(device ? device.getProcAddr( "vkCmdFillBuffer") : instance.getProcAddr( "vkCmdFillBuffer"));
+ vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT"));
+ vkCmdNextSubpass = PFN_vkCmdNextSubpass(device ? device.getProcAddr( "vkCmdNextSubpass") : instance.getProcAddr( "vkCmdNextSubpass"));
+ vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier(device ? device.getProcAddr( "vkCmdPipelineBarrier") : instance.getProcAddr( "vkCmdPipelineBarrier"));
+ vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX(device ? device.getProcAddr( "vkCmdProcessCommandsNVX") : instance.getProcAddr( "vkCmdProcessCommandsNVX"));
+ vkCmdPushConstants = PFN_vkCmdPushConstants(device ? device.getProcAddr( "vkCmdPushConstants") : instance.getProcAddr( "vkCmdPushConstants"));
+ vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetKHR"));
+ vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR"));
+ vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX(device ? device.getProcAddr( "vkCmdReserveSpaceForCommandsNVX") : instance.getProcAddr( "vkCmdReserveSpaceForCommandsNVX"));
+ vkCmdResetEvent = PFN_vkCmdResetEvent(device ? device.getProcAddr( "vkCmdResetEvent") : instance.getProcAddr( "vkCmdResetEvent"));
+ vkCmdResetQueryPool = PFN_vkCmdResetQueryPool(device ? device.getProcAddr( "vkCmdResetQueryPool") : instance.getProcAddr( "vkCmdResetQueryPool"));
+ vkCmdResolveImage = PFN_vkCmdResolveImage(device ? device.getProcAddr( "vkCmdResolveImage") : instance.getProcAddr( "vkCmdResolveImage"));
+ vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants(device ? device.getProcAddr( "vkCmdSetBlendConstants") : instance.getProcAddr( "vkCmdSetBlendConstants"));
+ vkCmdSetDepthBias = PFN_vkCmdSetDepthBias(device ? device.getProcAddr( "vkCmdSetDepthBias") : instance.getProcAddr( "vkCmdSetDepthBias"));
+ vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds(device ? device.getProcAddr( "vkCmdSetDepthBounds") : instance.getProcAddr( "vkCmdSetDepthBounds"));
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask(device ? device.getProcAddr( "vkCmdSetDeviceMask") : instance.getProcAddr( "vkCmdSetDeviceMask"));
+ vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR(device ? device.getProcAddr( "vkCmdSetDeviceMaskKHR") : instance.getProcAddr( "vkCmdSetDeviceMaskKHR"));
+ vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT(device ? device.getProcAddr( "vkCmdSetDiscardRectangleEXT") : instance.getProcAddr( "vkCmdSetDiscardRectangleEXT"));
+ vkCmdSetEvent = PFN_vkCmdSetEvent(device ? device.getProcAddr( "vkCmdSetEvent") : instance.getProcAddr( "vkCmdSetEvent"));
+ vkCmdSetLineWidth = PFN_vkCmdSetLineWidth(device ? device.getProcAddr( "vkCmdSetLineWidth") : instance.getProcAddr( "vkCmdSetLineWidth"));
+ vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT(device ? device.getProcAddr( "vkCmdSetSampleLocationsEXT") : instance.getProcAddr( "vkCmdSetSampleLocationsEXT"));
+ vkCmdSetScissor = PFN_vkCmdSetScissor(device ? device.getProcAddr( "vkCmdSetScissor") : instance.getProcAddr( "vkCmdSetScissor"));
+ vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask(device ? device.getProcAddr( "vkCmdSetStencilCompareMask") : instance.getProcAddr( "vkCmdSetStencilCompareMask"));
+ vkCmdSetStencilReference = PFN_vkCmdSetStencilReference(device ? device.getProcAddr( "vkCmdSetStencilReference") : instance.getProcAddr( "vkCmdSetStencilReference"));
+ vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask(device ? device.getProcAddr( "vkCmdSetStencilWriteMask") : instance.getProcAddr( "vkCmdSetStencilWriteMask"));
+ vkCmdSetViewport = PFN_vkCmdSetViewport(device ? device.getProcAddr( "vkCmdSetViewport") : instance.getProcAddr( "vkCmdSetViewport"));
+ vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV(device ? device.getProcAddr( "vkCmdSetViewportWScalingNV") : instance.getProcAddr( "vkCmdSetViewportWScalingNV"));
+ vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer(device ? device.getProcAddr( "vkCmdUpdateBuffer") : instance.getProcAddr( "vkCmdUpdateBuffer"));
+ vkCmdWaitEvents = PFN_vkCmdWaitEvents(device ? device.getProcAddr( "vkCmdWaitEvents") : instance.getProcAddr( "vkCmdWaitEvents"));
+ vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD(device ? device.getProcAddr( "vkCmdWriteBufferMarkerAMD") : instance.getProcAddr( "vkCmdWriteBufferMarkerAMD"));
+ vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp(device ? device.getProcAddr( "vkCmdWriteTimestamp") : instance.getProcAddr( "vkCmdWriteTimestamp"));
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR(instance.getProcAddr( "vkCreateAndroidSurfaceKHR"));
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ vkCreateBuffer = PFN_vkCreateBuffer(device ? device.getProcAddr( "vkCreateBuffer") : instance.getProcAddr( "vkCreateBuffer"));
+ vkCreateBufferView = PFN_vkCreateBufferView(device ? device.getProcAddr( "vkCreateBufferView") : instance.getProcAddr( "vkCreateBufferView"));
+ vkCreateCommandPool = PFN_vkCreateCommandPool(device ? device.getProcAddr( "vkCreateCommandPool") : instance.getProcAddr( "vkCreateCommandPool"));
+ vkCreateComputePipelines = PFN_vkCreateComputePipelines(device ? device.getProcAddr( "vkCreateComputePipelines") : instance.getProcAddr( "vkCreateComputePipelines"));
+ vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT(instance.getProcAddr( "vkCreateDebugReportCallbackEXT"));
+ vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT(instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT"));
+ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool(device ? device.getProcAddr( "vkCreateDescriptorPool") : instance.getProcAddr( "vkCreateDescriptorPool"));
+ vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout(device ? device.getProcAddr( "vkCreateDescriptorSetLayout") : instance.getProcAddr( "vkCreateDescriptorSetLayout"));
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplate") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplate"));
+ vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR"));
+ vkCreateDevice = PFN_vkCreateDevice(device ? device.getProcAddr( "vkCreateDevice") : instance.getProcAddr( "vkCreateDevice"));
+ vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR(device ? device.getProcAddr( "vkCreateDisplayModeKHR") : instance.getProcAddr( "vkCreateDisplayModeKHR"));
+ vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR(instance.getProcAddr( "vkCreateDisplayPlaneSurfaceKHR"));
+ vkCreateEvent = PFN_vkCreateEvent(device ? device.getProcAddr( "vkCreateEvent") : instance.getProcAddr( "vkCreateEvent"));
+ vkCreateFence = PFN_vkCreateFence(device ? device.getProcAddr( "vkCreateFence") : instance.getProcAddr( "vkCreateFence"));
+ vkCreateFramebuffer = PFN_vkCreateFramebuffer(device ? device.getProcAddr( "vkCreateFramebuffer") : instance.getProcAddr( "vkCreateFramebuffer"));
+ vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines(device ? device.getProcAddr( "vkCreateGraphicsPipelines") : instance.getProcAddr( "vkCreateGraphicsPipelines"));
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK(instance.getProcAddr( "vkCreateIOSSurfaceMVK"));
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+ vkCreateImage = PFN_vkCreateImage(device ? device.getProcAddr( "vkCreateImage") : instance.getProcAddr( "vkCreateImage"));
+ vkCreateImageView = PFN_vkCreateImageView(device ? device.getProcAddr( "vkCreateImageView") : instance.getProcAddr( "vkCreateImageView"));
+ vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkCreateIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkCreateIndirectCommandsLayoutNVX"));
+ vkCreateInstance = PFN_vkCreateInstance(instance.getProcAddr( "vkCreateInstance"));
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK(instance.getProcAddr( "vkCreateMacOSSurfaceMVK"));
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ vkCreateMirSurfaceKHR = PFN_vkCreateMirSurfaceKHR(instance.getProcAddr( "vkCreateMirSurfaceKHR"));
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX(device ? device.getProcAddr( "vkCreateObjectTableNVX") : instance.getProcAddr( "vkCreateObjectTableNVX"));
+ vkCreatePipelineCache = PFN_vkCreatePipelineCache(device ? device.getProcAddr( "vkCreatePipelineCache") : instance.getProcAddr( "vkCreatePipelineCache"));
+ vkCreatePipelineLayout = PFN_vkCreatePipelineLayout(device ? device.getProcAddr( "vkCreatePipelineLayout") : instance.getProcAddr( "vkCreatePipelineLayout"));
+ vkCreateQueryPool = PFN_vkCreateQueryPool(device ? device.getProcAddr( "vkCreateQueryPool") : instance.getProcAddr( "vkCreateQueryPool"));
+ vkCreateRenderPass = PFN_vkCreateRenderPass(device ? device.getProcAddr( "vkCreateRenderPass") : instance.getProcAddr( "vkCreateRenderPass"));
+ vkCreateSampler = PFN_vkCreateSampler(device ? device.getProcAddr( "vkCreateSampler") : instance.getProcAddr( "vkCreateSampler"));
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversion") : instance.getProcAddr( "vkCreateSamplerYcbcrConversion"));
+ vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversionKHR") : instance.getProcAddr( "vkCreateSamplerYcbcrConversionKHR"));
+ vkCreateSemaphore = PFN_vkCreateSemaphore(device ? device.getProcAddr( "vkCreateSemaphore") : instance.getProcAddr( "vkCreateSemaphore"));
+ vkCreateShaderModule = PFN_vkCreateShaderModule(device ? device.getProcAddr( "vkCreateShaderModule") : instance.getProcAddr( "vkCreateShaderModule"));
+ vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR(device ? device.getProcAddr( "vkCreateSharedSwapchainsKHR") : instance.getProcAddr( "vkCreateSharedSwapchainsKHR"));
+ vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR(device ? device.getProcAddr( "vkCreateSwapchainKHR") : instance.getProcAddr( "vkCreateSwapchainKHR"));
+ vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT(device ? device.getProcAddr( "vkCreateValidationCacheEXT") : instance.getProcAddr( "vkCreateValidationCacheEXT"));
+#ifdef VK_USE_PLATFORM_VI_NN
+ vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN(instance.getProcAddr( "vkCreateViSurfaceNN"));
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR(instance.getProcAddr( "vkCreateWaylandSurfaceKHR"));
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR(instance.getProcAddr( "vkCreateWin32SurfaceKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR(instance.getProcAddr( "vkCreateXcbSurfaceKHR"));
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR(instance.getProcAddr( "vkCreateXlibSurfaceKHR"));
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectNameEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectNameEXT"));
+ vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectTagEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectTagEXT"));
+ vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT(instance.getProcAddr( "vkDebugReportMessageEXT"));
+ vkDestroyBuffer = PFN_vkDestroyBuffer(device ? device.getProcAddr( "vkDestroyBuffer") : instance.getProcAddr( "vkDestroyBuffer"));
+ vkDestroyBufferView = PFN_vkDestroyBufferView(device ? device.getProcAddr( "vkDestroyBufferView") : instance.getProcAddr( "vkDestroyBufferView"));
+ vkDestroyCommandPool = PFN_vkDestroyCommandPool(device ? device.getProcAddr( "vkDestroyCommandPool") : instance.getProcAddr( "vkDestroyCommandPool"));
+ vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT(instance.getProcAddr( "vkDestroyDebugReportCallbackEXT"));
+ vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT(instance.getProcAddr( "vkDestroyDebugUtilsMessengerEXT"));
+ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool(device ? device.getProcAddr( "vkDestroyDescriptorPool") : instance.getProcAddr( "vkDestroyDescriptorPool"));
+ vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout(device ? device.getProcAddr( "vkDestroyDescriptorSetLayout") : instance.getProcAddr( "vkDestroyDescriptorSetLayout"));
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplate") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplate"));
+ vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR"));
+ vkDestroyDevice = PFN_vkDestroyDevice(device ? device.getProcAddr( "vkDestroyDevice") : instance.getProcAddr( "vkDestroyDevice"));
+ vkDestroyEvent = PFN_vkDestroyEvent(device ? device.getProcAddr( "vkDestroyEvent") : instance.getProcAddr( "vkDestroyEvent"));
+ vkDestroyFence = PFN_vkDestroyFence(device ? device.getProcAddr( "vkDestroyFence") : instance.getProcAddr( "vkDestroyFence"));
+ vkDestroyFramebuffer = PFN_vkDestroyFramebuffer(device ? device.getProcAddr( "vkDestroyFramebuffer") : instance.getProcAddr( "vkDestroyFramebuffer"));
+ vkDestroyImage = PFN_vkDestroyImage(device ? device.getProcAddr( "vkDestroyImage") : instance.getProcAddr( "vkDestroyImage"));
+ vkDestroyImageView = PFN_vkDestroyImageView(device ? device.getProcAddr( "vkDestroyImageView") : instance.getProcAddr( "vkDestroyImageView"));
+ vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX"));
+ vkDestroyInstance = PFN_vkDestroyInstance(instance.getProcAddr( "vkDestroyInstance"));
+ vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX(device ? device.getProcAddr( "vkDestroyObjectTableNVX") : instance.getProcAddr( "vkDestroyObjectTableNVX"));
+ vkDestroyPipeline = PFN_vkDestroyPipeline(device ? device.getProcAddr( "vkDestroyPipeline") : instance.getProcAddr( "vkDestroyPipeline"));
+ vkDestroyPipelineCache = PFN_vkDestroyPipelineCache(device ? device.getProcAddr( "vkDestroyPipelineCache") : instance.getProcAddr( "vkDestroyPipelineCache"));
+ vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout(device ? device.getProcAddr( "vkDestroyPipelineLayout") : instance.getProcAddr( "vkDestroyPipelineLayout"));
+ vkDestroyQueryPool = PFN_vkDestroyQueryPool(device ? device.getProcAddr( "vkDestroyQueryPool") : instance.getProcAddr( "vkDestroyQueryPool"));
+ vkDestroyRenderPass = PFN_vkDestroyRenderPass(device ? device.getProcAddr( "vkDestroyRenderPass") : instance.getProcAddr( "vkDestroyRenderPass"));
+ vkDestroySampler = PFN_vkDestroySampler(device ? device.getProcAddr( "vkDestroySampler") : instance.getProcAddr( "vkDestroySampler"));
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversion") : instance.getProcAddr( "vkDestroySamplerYcbcrConversion"));
+ vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversionKHR") : instance.getProcAddr( "vkDestroySamplerYcbcrConversionKHR"));
+ vkDestroySemaphore = PFN_vkDestroySemaphore(device ? device.getProcAddr( "vkDestroySemaphore") : instance.getProcAddr( "vkDestroySemaphore"));
+ vkDestroyShaderModule = PFN_vkDestroyShaderModule(device ? device.getProcAddr( "vkDestroyShaderModule") : instance.getProcAddr( "vkDestroyShaderModule"));
+ vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR(instance.getProcAddr( "vkDestroySurfaceKHR"));
+ vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR(device ? device.getProcAddr( "vkDestroySwapchainKHR") : instance.getProcAddr( "vkDestroySwapchainKHR"));
+ vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT(device ? device.getProcAddr( "vkDestroyValidationCacheEXT") : instance.getProcAddr( "vkDestroyValidationCacheEXT"));
+ vkDeviceWaitIdle = PFN_vkDeviceWaitIdle(device ? device.getProcAddr( "vkDeviceWaitIdle") : instance.getProcAddr( "vkDeviceWaitIdle"));
+ vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT(device ? device.getProcAddr( "vkDisplayPowerControlEXT") : instance.getProcAddr( "vkDisplayPowerControlEXT"));
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer(device ? device.getProcAddr( "vkEndCommandBuffer") : instance.getProcAddr( "vkEndCommandBuffer"));
+ vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(device ? device.getProcAddr( "vkEnumerateDeviceExtensionProperties") : instance.getProcAddr( "vkEnumerateDeviceExtensionProperties"));
+ vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties(device ? device.getProcAddr( "vkEnumerateDeviceLayerProperties") : instance.getProcAddr( "vkEnumerateDeviceLayerProperties"));
+ vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(instance.getProcAddr( "vkEnumerateInstanceExtensionProperties"));
+ vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties(instance.getProcAddr( "vkEnumerateInstanceLayerProperties"));
+ vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion(instance.getProcAddr( "vkEnumerateInstanceVersion"));
+ vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroups"));
+ vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroupsKHR"));
+ vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices(instance.getProcAddr( "vkEnumeratePhysicalDevices"));
+ vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges(device ? device.getProcAddr( "vkFlushMappedMemoryRanges") : instance.getProcAddr( "vkFlushMappedMemoryRanges"));
+ vkFreeCommandBuffers = PFN_vkFreeCommandBuffers(device ? device.getProcAddr( "vkFreeCommandBuffers") : instance.getProcAddr( "vkFreeCommandBuffers"));
+ vkFreeDescriptorSets = PFN_vkFreeDescriptorSets(device ? device.getProcAddr( "vkFreeDescriptorSets") : instance.getProcAddr( "vkFreeDescriptorSets"));
+ vkFreeMemory = PFN_vkFreeMemory(device ? device.getProcAddr( "vkFreeMemory") : instance.getProcAddr( "vkFreeMemory"));
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(device ? device.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID") : instance.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID"));
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements(device ? device.getProcAddr( "vkGetBufferMemoryRequirements") : instance.getProcAddr( "vkGetBufferMemoryRequirements"));
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2") : instance.getProcAddr( "vkGetBufferMemoryRequirements2"));
+ vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2KHR") : instance.getProcAddr( "vkGetBufferMemoryRequirements2KHR"));
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupport") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupport"));
+ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR"));
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures"));
+ vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR"));
+ vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR") : instance.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR"));
+ vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR") : instance.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR"));
+ vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment(device ? device.getProcAddr( "vkGetDeviceMemoryCommitment") : instance.getProcAddr( "vkGetDeviceMemoryCommitment"));
+ vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr(device ? device.getProcAddr( "vkGetDeviceProcAddr") : instance.getProcAddr( "vkGetDeviceProcAddr"));
+ vkGetDeviceQueue = PFN_vkGetDeviceQueue(device ? device.getProcAddr( "vkGetDeviceQueue") : instance.getProcAddr( "vkGetDeviceQueue"));
+ vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2(device ? device.getProcAddr( "vkGetDeviceQueue2") : instance.getProcAddr( "vkGetDeviceQueue2"));
+ vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR(device ? device.getProcAddr( "vkGetDisplayModePropertiesKHR") : instance.getProcAddr( "vkGetDisplayModePropertiesKHR"));
+ vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR(device ? device.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR") : instance.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR"));
+ vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(device ? device.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR") : instance.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR"));
+ vkGetEventStatus = PFN_vkGetEventStatus(device ? device.getProcAddr( "vkGetEventStatus") : instance.getProcAddr( "vkGetEventStatus"));
+ vkGetFenceFdKHR = PFN_vkGetFenceFdKHR(device ? device.getProcAddr( "vkGetFenceFdKHR") : instance.getProcAddr( "vkGetFenceFdKHR"));
+ vkGetFenceStatus = PFN_vkGetFenceStatus(device ? device.getProcAddr( "vkGetFenceStatus") : instance.getProcAddr( "vkGetFenceStatus"));
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR(device ? device.getProcAddr( "vkGetFenceWin32HandleKHR") : instance.getProcAddr( "vkGetFenceWin32HandleKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements(device ? device.getProcAddr( "vkGetImageMemoryRequirements") : instance.getProcAddr( "vkGetImageMemoryRequirements"));
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2(device ? device.getProcAddr( "vkGetImageMemoryRequirements2") : instance.getProcAddr( "vkGetImageMemoryRequirements2"));
+ vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageMemoryRequirements2KHR"));
+ vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements"));
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2"));
+ vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR"));
+ vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout(device ? device.getProcAddr( "vkGetImageSubresourceLayout") : instance.getProcAddr( "vkGetImageSubresourceLayout"));
+ vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr(instance.getProcAddr( "vkGetInstanceProcAddr"));
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(device ? device.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID") : instance.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID"));
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR(device ? device.getProcAddr( "vkGetMemoryFdKHR") : instance.getProcAddr( "vkGetMemoryFdKHR"));
+ vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR(device ? device.getProcAddr( "vkGetMemoryFdPropertiesKHR") : instance.getProcAddr( "vkGetMemoryFdPropertiesKHR"));
+ vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(device ? device.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT") : instance.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT"));
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandleKHR") : instance.getProcAddr( "vkGetMemoryWin32HandleKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_NV
+ vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV(device ? device.getProcAddr( "vkGetMemoryWin32HandleNV") : instance.getProcAddr( "vkGetMemoryWin32HandleNV"));
+#endif /*VK_USE_PLATFORM_WIN32_NV*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR") : instance.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE(device ? device.getProcAddr( "vkGetPastPresentationTimingGOOGLE") : instance.getProcAddr( "vkGetPastPresentationTimingGOOGLE"));
+ vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"));
+ vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR"));
+ vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties"));
+ vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR"));
+ vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties"));
+ vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR"));
+ vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV") : instance.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV"));
+ vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties"));
+ vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"));
+ vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures"));
+ vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures2") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures2"));
+ vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR"));
+ vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties"));
+ vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2"));
+ vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR"));
+ vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(device ? device.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX") : instance.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"));
+ vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties"));
+ vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2"));
+ vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR"));
+ vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties"));
+ vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2"));
+ vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR"));
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ vkGetPhysicalDeviceMirPresentationSupportKHR = PFN_vkGetPhysicalDeviceMirPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceMirPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceMirPresentationSupportKHR"));
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+ vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(device ? device.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT") : instance.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT"));
+ vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(device ? device.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR") : instance.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR"));
+ vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties") : instance.getProcAddr( "vkGetPhysicalDeviceProperties"));
+ vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceProperties2"));
+ vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceProperties2KHR"));
+ vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties"));
+ vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2"));
+ vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR"));
+ vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties"));
+ vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2"));
+ vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"));
+ vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT"));
+ vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR"));
+ vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
+ vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR"));
+ vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR"));
+ vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR"));
+ vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR"));
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR"));
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR"));
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR"));
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+ vkGetPipelineCacheData = PFN_vkGetPipelineCacheData(device ? device.getProcAddr( "vkGetPipelineCacheData") : instance.getProcAddr( "vkGetPipelineCacheData"));
+ vkGetQueryPoolResults = PFN_vkGetQueryPoolResults(device ? device.getProcAddr( "vkGetQueryPoolResults") : instance.getProcAddr( "vkGetQueryPoolResults"));
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+ vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT(device ? device.getProcAddr( "vkGetRandROutputDisplayEXT") : instance.getProcAddr( "vkGetRandROutputDisplayEXT"));
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE(device ? device.getProcAddr( "vkGetRefreshCycleDurationGOOGLE") : instance.getProcAddr( "vkGetRefreshCycleDurationGOOGLE"));
+ vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity(device ? device.getProcAddr( "vkGetRenderAreaGranularity") : instance.getProcAddr( "vkGetRenderAreaGranularity"));
+ vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR(device ? device.getProcAddr( "vkGetSemaphoreFdKHR") : instance.getProcAddr( "vkGetSemaphoreFdKHR"));
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkGetSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkGetSemaphoreWin32HandleKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD(device ? device.getProcAddr( "vkGetShaderInfoAMD") : instance.getProcAddr( "vkGetShaderInfoAMD"));
+ vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT(device ? device.getProcAddr( "vkGetSwapchainCounterEXT") : instance.getProcAddr( "vkGetSwapchainCounterEXT"));
+ vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR(device ? device.getProcAddr( "vkGetSwapchainImagesKHR") : instance.getProcAddr( "vkGetSwapchainImagesKHR"));
+ vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR(device ? device.getProcAddr( "vkGetSwapchainStatusKHR") : instance.getProcAddr( "vkGetSwapchainStatusKHR"));
+ vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT(device ? device.getProcAddr( "vkGetValidationCacheDataEXT") : instance.getProcAddr( "vkGetValidationCacheDataEXT"));
+ vkImportFenceFdKHR = PFN_vkImportFenceFdKHR(device ? device.getProcAddr( "vkImportFenceFdKHR") : instance.getProcAddr( "vkImportFenceFdKHR"));
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR(device ? device.getProcAddr( "vkImportFenceWin32HandleKHR") : instance.getProcAddr( "vkImportFenceWin32HandleKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR(device ? device.getProcAddr( "vkImportSemaphoreFdKHR") : instance.getProcAddr( "vkImportSemaphoreFdKHR"));
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkImportSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkImportSemaphoreWin32HandleKHR"));
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges(device ? device.getProcAddr( "vkInvalidateMappedMemoryRanges") : instance.getProcAddr( "vkInvalidateMappedMemoryRanges"));
+ vkMapMemory = PFN_vkMapMemory(device ? device.getProcAddr( "vkMapMemory") : instance.getProcAddr( "vkMapMemory"));
+ vkMergePipelineCaches = PFN_vkMergePipelineCaches(device ? device.getProcAddr( "vkMergePipelineCaches") : instance.getProcAddr( "vkMergePipelineCaches"));
+ vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT(device ? device.getProcAddr( "vkMergeValidationCachesEXT") : instance.getProcAddr( "vkMergeValidationCachesEXT"));
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT"));
+ vkQueueBindSparse = PFN_vkQueueBindSparse(device ? device.getProcAddr( "vkQueueBindSparse") : instance.getProcAddr( "vkQueueBindSparse"));
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueEndDebugUtilsLabelEXT"));
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT"));
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR(device ? device.getProcAddr( "vkQueuePresentKHR") : instance.getProcAddr( "vkQueuePresentKHR"));
+ vkQueueSubmit = PFN_vkQueueSubmit(device ? device.getProcAddr( "vkQueueSubmit") : instance.getProcAddr( "vkQueueSubmit"));
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle(device ? device.getProcAddr( "vkQueueWaitIdle") : instance.getProcAddr( "vkQueueWaitIdle"));
+ vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT(device ? device.getProcAddr( "vkRegisterDeviceEventEXT") : instance.getProcAddr( "vkRegisterDeviceEventEXT"));
+ vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT(device ? device.getProcAddr( "vkRegisterDisplayEventEXT") : instance.getProcAddr( "vkRegisterDisplayEventEXT"));
+ vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX(device ? device.getProcAddr( "vkRegisterObjectsNVX") : instance.getProcAddr( "vkRegisterObjectsNVX"));
+ vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT(device ? device.getProcAddr( "vkReleaseDisplayEXT") : instance.getProcAddr( "vkReleaseDisplayEXT"));
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer(device ? device.getProcAddr( "vkResetCommandBuffer") : instance.getProcAddr( "vkResetCommandBuffer"));
+ vkResetCommandPool = PFN_vkResetCommandPool(device ? device.getProcAddr( "vkResetCommandPool") : instance.getProcAddr( "vkResetCommandPool"));
+ vkResetDescriptorPool = PFN_vkResetDescriptorPool(device ? device.getProcAddr( "vkResetDescriptorPool") : instance.getProcAddr( "vkResetDescriptorPool"));
+ vkResetEvent = PFN_vkResetEvent(device ? device.getProcAddr( "vkResetEvent") : instance.getProcAddr( "vkResetEvent"));
+ vkResetFences = PFN_vkResetFences(device ? device.getProcAddr( "vkResetFences") : instance.getProcAddr( "vkResetFences"));
+ vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectNameEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectNameEXT"));
+ vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectTagEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectTagEXT"));
+ vkSetEvent = PFN_vkSetEvent(device ? device.getProcAddr( "vkSetEvent") : instance.getProcAddr( "vkSetEvent"));
+ vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT(device ? device.getProcAddr( "vkSetHdrMetadataEXT") : instance.getProcAddr( "vkSetHdrMetadataEXT"));
+ vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT(instance.getProcAddr( "vkSubmitDebugUtilsMessageEXT"));
+ vkTrimCommandPool = PFN_vkTrimCommandPool(device ? device.getProcAddr( "vkTrimCommandPool") : instance.getProcAddr( "vkTrimCommandPool"));
+ vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR(device ? device.getProcAddr( "vkTrimCommandPoolKHR") : instance.getProcAddr( "vkTrimCommandPoolKHR"));
+ vkUnmapMemory = PFN_vkUnmapMemory(device ? device.getProcAddr( "vkUnmapMemory") : instance.getProcAddr( "vkUnmapMemory"));
+ vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX(device ? device.getProcAddr( "vkUnregisterObjectsNVX") : instance.getProcAddr( "vkUnregisterObjectsNVX"));
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplate") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplate"));
+ vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR"));
+ vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets(device ? device.getProcAddr( "vkUpdateDescriptorSets") : instance.getProcAddr( "vkUpdateDescriptorSets"));
+ vkWaitForFences = PFN_vkWaitForFences(device ? device.getProcAddr( "vkWaitForFences") : instance.getProcAddr( "vkWaitForFences"));
+ }
+ };
+} // namespace VULKAN_HPP_NAMESPACE
+
+#endif
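
The loader body above follows one rule: if a vk::Device was passed in, most entry points are fetched with device.getProcAddr, while the purely instance-level calls (vkDestroyInstance, vkEnumeratePhysicalDevices, the surface-creation functions, and so on) always come from instance.getProcAddr; with no device, everything falls back to instance.getProcAddr. The sketch below shows how such a dispatch table might be filled and then used; it is an illustration, not part of the diff. The DispatchLoaderDynamic constructor taking a vk::Instance and a vk::Device is an assumption (only the loading body is visible in this hunk), the handles are created through the statically linked C entry points from vulkan_core.h, and queue family 0 is picked purely for brevity.

    // loader_example.cpp -- illustrative sketch only, not from the diff above
    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    int main()
    {
        // Create an instance through the statically linked C entry point.
        VkApplicationInfo appInfo = {};
        appInfo.sType      = VK_STRUCTURE_TYPE_APPLICATION_INFO;
        appInfo.apiVersion = VK_API_VERSION_1_0;

        VkInstanceCreateInfo instanceInfo = {};
        instanceInfo.sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
        instanceInfo.pApplicationInfo = &appInfo;

        VkInstance instance = VK_NULL_HANDLE;
        if (vkCreateInstance(&instanceInfo, nullptr, &instance) != VK_SUCCESS)
            return 1;

        // Grab the first physical device and build a logical device with one queue.
        uint32_t count = 1;
        VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
        vkEnumeratePhysicalDevices(instance, &count, &physicalDevice);
        if (count == 0 || physicalDevice == VK_NULL_HANDLE)
            return 1;

        float priority = 1.0f;
        VkDeviceQueueCreateInfo queueInfo = {};
        queueInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queueInfo.queueFamilyIndex = 0;   // assumption: family 0 is good enough for the example
        queueInfo.queueCount       = 1;
        queueInfo.pQueuePriorities = &priority;

        VkDeviceCreateInfo deviceInfo = {};
        deviceInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
        deviceInfo.queueCreateInfoCount = 1;
        deviceInfo.pQueueCreateInfos    = &queueInfo;

        VkDevice device = VK_NULL_HANDLE;
        if (vkCreateDevice(physicalDevice, &deviceInfo, nullptr, &device) != VK_SUCCESS)
            return 1;

        // Fill the dispatch table: with a device handle supplied, entry points are
        // resolved through vkGetDeviceProcAddr, otherwise through vkGetInstanceProcAddr
        // (the "device ? device.getProcAddr(...) : instance.getProcAddr(...)" pattern above).
        // The constructor signature is an assumption; only the loading body appears in the diff.
        vk::DispatchLoaderDynamic dispatch(vk::Instance(instance), vk::Device(device));

        // Call through the loaded pointers instead of the statically linked symbols.
        dispatch.vkDeviceWaitIdle(device);
        dispatch.vkDestroyDevice(device, nullptr);
        dispatch.vkDestroyInstance(instance, nullptr);
        std::printf("dispatch table initialised\n");
        return 0;
    }

Calling through the table (dispatch.vkDeviceWaitIdle and friends are exactly the PFN_* members loaded above) avoids the Vulkan loader's per-call dispatch trampoline for device-level commands, which is the usual reason to prefer a dynamically filled table over the statically linked symbols.
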
diff --git a/include/vulkan/vulkan_android.h b/include/vulkan/vulkan_android.h
new file mode 100644
index 0000000..07aaeda
--- /dev/null
+++ b/include/vulkan/vulkan_android.h
@@ -0,0 +1,126 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_android_surface 1
+struct ANativeWindow;
+
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkAndroidSurfaceCreateFlagsKHR flags;
+    struct ANativeWindow* window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance instance,
+    const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+#endif
+
+#define VK_ANDROID_external_memory_android_hardware_buffer 1
+struct AHardwareBuffer;
+
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
+
+typedef struct VkAndroidHardwareBufferUsageANDROID {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t androidHardwareBufferUsage;
+} VkAndroidHardwareBufferUsageANDROID;
+
+typedef struct VkAndroidHardwareBufferPropertiesANDROID {
+    VkStructureType sType;
+    void* pNext;
+    VkDeviceSize allocationSize;
+    uint32_t memoryTypeBits;
+} VkAndroidHardwareBufferPropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID {
+    VkStructureType sType;
+    void* pNext;
+    VkFormat format;
+    uint64_t externalFormat;
+    VkFormatFeatureFlags formatFeatures;
+    VkComponentMapping samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion suggestedYcbcrModel;
+    VkSamplerYcbcrRange suggestedYcbcrRange;
+    VkChromaLocation suggestedXChromaOffset;
+    VkChromaLocation suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatPropertiesANDROID;
+
+typedef struct VkImportAndroidHardwareBufferInfoANDROID {
+    VkStructureType sType;
+    const void* pNext;
+    struct AHardwareBuffer* buffer;
+} VkImportAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+} VkMemoryGetAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkExternalFormatANDROID {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t externalFormat;
+} VkExternalFormatANDROID;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice device,
+    const struct AHardwareBuffer* buffer,
+    VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer** pBuffer);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
new file mode 100644
index 0000000..9fefb43
--- /dev/null
+++ b/include/vulkan/vulkan_core.h
@@ -0,0 +1,7470 @@
+#ifndef VULKAN_CORE_H_
+#define VULKAN_CORE_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_VERSION_1_0 1
+#include "vk_platform.h"
+
+#define VK_MAKE_VERSION(major, minor, patch) \
+    (((major) << 22) | ((minor) << 12) | (patch))
+
+// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
+
+// Vulkan 1.0 version number
+#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
+
+#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
+// Version of this file
+#define VK_HEADER_VERSION 74
+
+
+#define VK_NULL_HANDLE 0
+
+
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+#else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+#endif
+#endif
+
+
+
+typedef uint32_t VkFlags;
+typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceSize;
+typedef uint32_t VkSampleMask;
+
+VK_DEFINE_HANDLE(VkInstance)
+VK_DEFINE_HANDLE(VkPhysicalDevice)
+VK_DEFINE_HANDLE(VkDevice)
+VK_DEFINE_HANDLE(VkQueue)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
+VK_DEFINE_HANDLE(VkCommandBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
+
+#define VK_LOD_CLAMP_NONE 1000.0f
+#define VK_REMAINING_MIP_LEVELS (~0U)
+#define VK_REMAINING_ARRAY_LAYERS (~0U)
+#define VK_WHOLE_SIZE (~0ULL)
+#define VK_ATTACHMENT_UNUSED (~0U)
+#define VK_TRUE 1
+#define VK_FALSE 0
+#define VK_QUEUE_FAMILY_IGNORED (~0U)
+#define VK_SUBPASS_EXTERNAL (~0U)
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256
+#define VK_UUID_SIZE 16
+#define VK_MAX_MEMORY_TYPES 32
+#define VK_MAX_MEMORY_HEAPS 16
+#define VK_MAX_EXTENSION_NAME_SIZE 256
+#define VK_MAX_DESCRIPTION_SIZE 256
+
+
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1),
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+typedef enum VkResult {
+    VK_SUCCESS = 0,
+    VK_NOT_READY = 1,
+    VK_TIMEOUT = 2,
+    VK_EVENT_SET = 3,
+    VK_EVENT_RESET = 4,
+    VK_INCOMPLETE = 5,
+    VK_ERROR_OUT_OF_HOST_MEMORY = -1,
+    VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
+    VK_ERROR_INITIALIZATION_FAILED = -3,
VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_FRAGMENTATION_EXT = -1000161000, + VK_ERROR_NOT_PERMITTED_EXT = -1000174001, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, + VK_RESULT_END_RANGE = VK_INCOMPLETE, + VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER 
= 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + 
VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 
1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, + VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, + VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, + VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, + VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + 
VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = 
VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, + VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, + VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + 
VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT 
= 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + 
VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = 
VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, + VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, + VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, + VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, + VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, + VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, + VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, + VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, + VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, + VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, + VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, + VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - 
VK_SHARING_MODE_EXCLUSIVE + 1), + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, + VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, + VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, + VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, + VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, + VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, + VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, + VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = 
(VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, + VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, + VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, + VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, + VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, + VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, + VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, + VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, + VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, + VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, + VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, + VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, + 
VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, + VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, + VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, + VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_IMG = 1000015000, + VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, + VK_FILTER_END_RANGE = VK_FILTER_LINEAR, + VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + 
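/*
 * Editor's note: the sketch below is illustrative only and is NOT part of the
 * header added by this patch. It shows how the VkBlendFactor and VkBlendOp
 * values defined above are typically consumed through
 * VkPipelineColorBlendAttachmentState (declared further down in this header)
 * to request standard "source over" alpha blending. Everything used here is
 * core Vulkan 1.0 API; the helper function name is made up for the example.
 */
#if 0 /* illustrative sketch, not compiled as part of the header */
static VkPipelineColorBlendAttachmentState exampleAlphaBlendState(void)
{
    VkPipelineColorBlendAttachmentState state = {
        .blendEnable         = VK_TRUE,
        /* color = src.rgb * src.a + dst.rgb * (1 - src.a) */
        .srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA,
        .dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        .colorBlendOp        = VK_BLEND_OP_ADD,
        /* alpha = src.a + dst.a * (1 - src.a) */
        .srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE,
        .dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        .alphaBlendOp        = VK_BLEND_OP_ADD,
        /* write all four channels (VkColorComponentFlagBits, defined below) */
        .colorWriteMask      = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
                               VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
    };
    return state;
}
#endif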
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR,
+    VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1),
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1),
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1),
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER,
+    VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1),
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD,
+    VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+    VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1),
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE,
+    VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1),
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+    VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS,
VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, + VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, + VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, + VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, + VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, + VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, + VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, + VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + 
VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + 
VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + 
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; +typedef VkFlags VkEventCreateFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + 
VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE = 0x00000010, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum 
VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; +typedef VkFlags VkFramebufferCreateFlags; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, + VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + 
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FRONT_AND_BACK = 0x00000003, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 
fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + 
uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType 
memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { 
+ VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t 
constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} 
VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + 
float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags 
dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t 
dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR 
*PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const 
VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, 
uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR 
*PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, 
VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags 
srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + 
VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* 
pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + 
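/* Editor's note: an illustrative sketch, not part of the header or the patch.
 * It strings together the core entry points declared above (vkCreateBuffer,
 * vkGetBufferMemoryRequirements, vkAllocateMemory, vkBindBufferMemory,
 * vkMapMemory/vkUnmapMemory) into the usual host-visible buffer setup.
 * The function name, the memoryTypeIndex parameter (assumed to come from
 * vkGetPhysicalDeviceMemoryProperties and to be host-visible) and the chosen
 * usage flag are hypothetical; error handling is reduced to early returns. */
static VkResult create_host_visible_buffer(VkDevice device,
                                           VkDeviceSize size,
                                           uint32_t memoryTypeIndex,
                                           VkBuffer* outBuffer,
                                           VkDeviceMemory* outMemory)
{
    VkBufferCreateInfo bufferInfo = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .size = size,
        .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    };
    VkResult result = vkCreateBuffer(device, &bufferInfo, NULL, outBuffer);
    if (result != VK_SUCCESS) return result;

    /* Query size/alignment requirements before allocating backing memory. */
    VkMemoryRequirements requirements;
    vkGetBufferMemoryRequirements(device, *outBuffer, &requirements);

    VkMemoryAllocateInfo allocInfo = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .allocationSize = requirements.size,
        .memoryTypeIndex = memoryTypeIndex, /* assumed host-visible type */
    };
    result = vkAllocateMemory(device, &allocInfo, NULL, outMemory);
    if (result != VK_SUCCESS) {
        vkDestroyBuffer(device, *outBuffer, NULL);
        return result;
    }

    /* Bind the memory at offset 0, then map it to obtain a CPU pointer. */
    result = vkBindBufferMemory(device, *outBuffer, *outMemory, 0);
    if (result == VK_SUCCESS) {
        void* data = NULL;
        if (vkMapMemory(device, *outMemory, 0, VK_WHOLE_SIZE, 0, &data) == VK_SUCCESS) {
            /* data now points at host-visible memory backing the buffer;
             * the application can write vertex data, staging uploads, etc. */
            vkUnmapMemory(device, *outMemory);
        }
    }
    return result;
}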
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR 
VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* 
pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + +#define 
VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 + + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) + +#define VK_MAX_DEVICE_GROUP_SIZE 32 +#define VK_LUID_SIZE 8 +#define VK_QUEUE_FAMILY_EXTERNAL (~0U-1) + + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1), + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1), + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1), + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = 
(VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1), + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1), + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1), + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} 
VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; + +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const 
void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + 
VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointerFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + 
VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct 
VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParameterFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParameterFeatures; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const 
VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + 
VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +#define VK_COLORSPACE_SRGB_NONLINEAR_KHR VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, + VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkPresentModeKHR; + + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; + +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + 
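/* Editor's note: an illustrative sketch, not part of the header or the patch.
 * It shows the typical VK_KHR_surface queries an application performs before
 * creating a swapchain, using the prototypes declared above. The function
 * name and the fixed-size format array are hypothetical; it assumes a valid
 * physicalDevice, surface and queueFamilyIndex, and it demonstrates the
 * two-call enumeration pattern (count first, then fill). */
static VkBool32 query_surface_basics(VkPhysicalDevice physicalDevice,
                                     VkSurfaceKHR surface,
                                     uint32_t queueFamilyIndex,
                                     VkSurfaceCapabilitiesKHR* outCaps)
{
    VkBool32 presentSupported = VK_FALSE;
    vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex,
                                         surface, &presentSupported);
    if (!presentSupported)
        return VK_FALSE;

    /* Min/max image counts, current extent and supported transforms. */
    vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, outCaps);

    /* First call returns the count, second call fills the array. */
    uint32_t formatCount = 0;
    vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, NULL);
    if (formatCount > 0) {
        VkSurfaceFormatKHR formats[8];
        if (formatCount > 8) formatCount = 8; /* clamp for this fixed-size sketch */
        vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, formats);
    }

    uint32_t presentModeCount = 0;
    vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface,
                                              &presentModeCount, NULL);
    /* VK_PRESENT_MODE_FIFO_KHR is the only present mode guaranteed to exist. */
    return VK_TRUE;
}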
+#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; + +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* 
pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + +#define VK_KHR_DISPLAY_SPEC_VERSION 21 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" + + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplayModeCreateFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct 
VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice 
physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" + +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" + +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" + +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); 
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 3 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" + +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t 
localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" + +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE + +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE + +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + +#define VK_KHR_external_memory 1 +#define 
VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL + +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" + +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" + +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" + +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" + +typedef 
struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" + +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" + +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" + +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" + +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + + +typedef VkDescriptorUpdateTemplateCreateFlags 
VkDescriptorUpdateTemplateCreateFlagsKHR; + + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" + +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + +#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" + +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" + +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + + 
+typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" + +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" + +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" + +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + +#define VK_KHR_variable_pointers 1 +#define 
VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" + +typedef VkPhysicalDeviceVariablePointerFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + + + +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" + +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" + +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" + +typedef struct VkImageFormatListCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfoKHR; + + + +#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" + +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + + +typedef 
VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" + +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + +#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3" + +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" +#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT +#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT + + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + 
VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, + VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const 
VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, + VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, + VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; + +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" + +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* 
pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" + +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#define VK_AMD_negative_viewport_height 1 +#define 
VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" + +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, + VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1), + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; + +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + 
VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; + +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" + +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, + VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, + VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; + +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +#define VK_NVX_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) + +#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" + + +typedef enum VkIndirectCommandsTokenTypeNVX { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, + 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNVX; + +typedef enum VkObjectEntryTypeNVX { + VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, + VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, + VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, + VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, + VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, + VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, + VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), + VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryTypeNVX; + + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNVX; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; + +typedef enum VkObjectEntryUsageFlagBitsNVX { + VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, + VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, + VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryUsageFlagBitsNVX; +typedef VkFlags VkObjectEntryUsageFlagsNVX; + +typedef struct VkDeviceGeneratedCommandsFeaturesNVX { + VkStructureType sType; + const void* pNext; + VkBool32 computeBindingPointSupport; +} VkDeviceGeneratedCommandsFeaturesNVX; + +typedef struct VkDeviceGeneratedCommandsLimitsNVX { + VkStructureType sType; + const void* pNext; + uint32_t maxIndirectCommandsLayoutTokenCount; + uint32_t maxObjectEntryCounts; + uint32_t minSequenceCountBufferOffsetAlignment; + uint32_t minSequenceIndexBufferOffsetAlignment; + uint32_t minCommandsTokenBufferOffsetAlignment; +} VkDeviceGeneratedCommandsLimitsNVX; + +typedef struct VkIndirectCommandsTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsTokenNVX; + +typedef struct VkIndirectCommandsLayoutTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + uint32_t bindingUnit; + uint32_t dynamicCount; + uint32_t divisor; +} VkIndirectCommandsLayoutTokenNVX; + +typedef struct VkIndirectCommandsLayoutCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkIndirectCommandsLayoutUsageFlagsNVX flags; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNVX* pTokens; +} VkIndirectCommandsLayoutCreateInfoNVX; + +typedef struct 
VkCmdProcessCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t indirectCommandsTokenCount; + const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; + uint32_t maxSequencesCount; + VkCommandBuffer targetCommandBuffer; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkCmdProcessCommandsInfoNVX; + +typedef struct VkCmdReserveSpaceForCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkCmdReserveSpaceForCommandsInfoNVX; + +typedef struct VkObjectTableCreateInfoNVX { + VkStructureType sType; + const void* pNext; + uint32_t objectCount; + const VkObjectEntryTypeNVX* pObjectEntryTypes; + const uint32_t* pObjectEntryCounts; + const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; + uint32_t maxUniformBuffersPerDescriptor; + uint32_t maxStorageBuffersPerDescriptor; + uint32_t maxStorageImagesPerDescriptor; + uint32_t maxSampledImagesPerDescriptor; + uint32_t maxPipelineLayouts; +} VkObjectTableCreateInfoNVX; + +typedef struct VkObjectTableEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; +} VkObjectTableEntryNVX; + +typedef struct VkObjectTablePipelineEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipeline pipeline; +} VkObjectTablePipelineEntryNVX; + +typedef struct VkObjectTableDescriptorSetEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkDescriptorSet descriptorSet; +} VkObjectTableDescriptorSetEntryNVX; + +typedef struct VkObjectTableVertexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; +} VkObjectTableVertexBufferEntryNVX; + +typedef struct VkObjectTableIndexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; + VkIndexType indexType; +} VkObjectTableIndexBufferEntryNVX; + +typedef struct VkObjectTablePushConstantEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkShaderStageFlags stageFlags; +} VkObjectTablePushConstantEntryNVX; + + +typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); +typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, 
uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); +typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( + VkDevice device, + VkIndirectCommandsLayoutNVX indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( + VkDevice device, + const VkObjectTableCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkObjectTableNVX* pObjectTable); + +VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( + VkDevice device, + VkObjectTableNVX objectTable, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectTableEntryNVX* const* ppObjectTableEntries, + const uint32_t* pObjectIndices); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectEntryTypeNVX* pObjectEntryTypes, + const uint32_t* pObjectIndices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( + VkPhysicalDevice physicalDevice, + VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, + VkDeviceGeneratedCommandsLimitsNVX* pLimits); +#endif + +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" + +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); 
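
A minimal usage sketch, not part of the generated header: extension commands such as vkReleaseDisplayEXT (whose PFN_* typedef appears just above) are commonly resolved at run time through vkGetInstanceProcAddr rather than linked directly. The helper name release_display_example and the pre-existing instance, physicalDevice and display handles are assumptions for illustration, and the instance is assumed to have been created with VK_KHR_display and VK_EXT_direct_mode_display enabled.

/* Sketch only: resolve and call the VK_EXT_direct_mode_display entry point. */
#include <vulkan/vulkan.h>

static VkResult release_display_example(VkInstance instance,
                                        VkPhysicalDevice physicalDevice,
                                        VkDisplayKHR display)
{
    PFN_vkReleaseDisplayEXT pfnReleaseDisplayEXT =
        (PFN_vkReleaseDisplayEXT)vkGetInstanceProcAddr(instance, "vkReleaseDisplayEXT");
    if (pfnReleaseDisplayEXT == NULL) {
        /* The loader does not expose the extension on this instance. */
        return VK_ERROR_EXTENSION_NOT_PRESENT;
    }
    /* Hand the acquired display back to the windowing system. */
    return pfnReleaseDisplayEXT(physicalDevice, display);
}

The same vkGetInstanceProcAddr (or vkGetDeviceProcAddr for device-level commands) pattern applies to the other PFN_* typedefs declared in these extension blocks when VK_NO_PROTOTYPES is defined.
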
+ +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + +#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" +#define VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT + + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; + +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, + VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, + VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; + +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const 
void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" + +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +#define 
VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" + + +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" + +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; + +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; + +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; + +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + 
uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); +#endif + +#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1), + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; + +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" + +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const 
VkHdrMetadataEXT* pMetadata); +#endif + +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~0U-2) + + +#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" + +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, 
const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" + + +typedef enum VkSamplerReductionModeEXT { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, + VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, + VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, + VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT 
= (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSamplerReductionModeEXT; + +typedef struct VkSamplerReductionModeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSamplerReductionModeEXT reductionMode; +} VkSamplerReductionModeCreateInfoEXT; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" + +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, + VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" + +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; + +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, + VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, + VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; + +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; + +typedef struct 
VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) + +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" +#define VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT + + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; + +typedef VkFlags VkValidationCacheCreateFlagsEXT; + +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + +#define VK_EXT_descriptor_indexing 1 
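
A hedged sketch, not part of the generated header: the VK_EXT_descriptor_indexing structures declared just below are chained into VkDescriptorSetLayoutCreateInfo to mark bindings as partially bound and updatable after bind. The device is assumed to have been created with the extension enabled and the relevant features turned on; the helper name and the single combined-image-sampler binding are assumptions for illustration.

/* Sketch only: a large, partially populated texture array binding. */
#include <vulkan/vulkan.h>

static VkResult create_bindless_texture_layout(VkDevice device,
                                               uint32_t textureCount,
                                               VkDescriptorSetLayout* pLayout)
{
    VkDescriptorSetLayoutBinding binding = {0};
    binding.binding         = 0;
    binding.descriptorType  = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    binding.descriptorCount = textureCount;            /* large array, filled lazily */
    binding.stageFlags      = VK_SHADER_STAGE_FRAGMENT_BIT;

    const VkDescriptorBindingFlagsEXT bindingFlags =
        VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT |
        VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;

    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT flagsInfo = {0};
    flagsInfo.sType         = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
    flagsInfo.bindingCount  = 1;
    flagsInfo.pBindingFlags = &bindingFlags;

    VkDescriptorSetLayoutCreateInfo layoutInfo = {0};
    layoutInfo.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutInfo.pNext        = &flagsInfo;               /* chain the binding flags */
    layoutInfo.flags        = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
    layoutInfo.bindingCount = 1;
    layoutInfo.pBindings    = &binding;

    return vkCreateDescriptorSetLayout(device, &layoutInfo, NULL, pLayout);
}
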
+#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" + + +typedef enum VkDescriptorBindingFlagBitsEXT { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDescriptorBindingFlagBitsEXT; +typedef VkFlags VkDescriptorBindingFlagsEXT; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlagsEXT* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} 
VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" + + +typedef enum VkQueueGlobalPriorityEXT { + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, + VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, + VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1), + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF +} VkQueueGlobalPriorityEXT; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityEXT globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" + +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" + +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + +#define VK_AMD_shader_core_properties 1 +#define 
VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" + +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" + +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef struct VkVertexInputBindingDivisorDescriptionEXT { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionEXT; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoEXT; + + + +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_ios.h b/include/vulkan/vulkan_ios.h new file mode 100644 index 0000000..a092481 --- /dev/null +++ b/include/vulkan/vulkan_ios.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" + +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; + +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_macos.h b/include/vulkan/vulkan_macos.h new file mode 100644 index 0000000..ff0b701 --- /dev/null +++ b/include/vulkan/vulkan_macos.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" + +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; + +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_mir.h b/include/vulkan/vulkan_mir.h new file mode 100644 index 0000000..7d24ed2 --- /dev/null +++ b/include/vulkan/vulkan_mir.h @@ -0,0 +1,65 @@ +#ifndef VULKAN_MIR_H_ +#define VULKAN_MIR_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_mir_surface 1 +#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4 +#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface" + +typedef VkFlags VkMirSurfaceCreateFlagsKHR; + +typedef struct VkMirSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkMirSurfaceCreateFlagsKHR flags; + MirConnection* connection; + MirSurface* mirSurface; +} VkMirSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR( + VkInstance instance, + const VkMirSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + MirConnection* connection); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_vi.h b/include/vulkan/vulkan_vi.h new file mode 100644 index 0000000..015166b --- /dev/null +++ b/include/vulkan/vulkan_vi.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" + +typedef VkFlags VkViSurfaceCreateFlagsNN; + +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_wayland.h b/include/vulkan/vulkan_wayland.h new file mode 100644 index 0000000..5ba0827 --- /dev/null +++ b/include/vulkan/vulkan_wayland.h @@ -0,0 +1,65 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. 
+** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" + +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; + +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_win32.h b/include/vulkan/vulkan_win32.h new file mode 100644 index 0000000..6a85409 --- /dev/null +++ b/include/vulkan/vulkan_win32.h @@ -0,0 +1,276 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" + +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; + +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + +#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" + +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + +#define VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" + +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +#define VK_KHR_external_semaphore_win32 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define 
VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" + +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + +#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" + +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + +#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" + +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + 
VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" + +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xcb.h b/include/vulkan/vulkan_xcb.h new file mode 100644 index 0000000..ba03600 --- /dev/null +++ b/include/vulkan/vulkan_xcb.h @@ -0,0 +1,66 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" + +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; + +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib.h b/include/vulkan/vulkan_xlib.h new file mode 100644 index 0000000..e1d967e --- /dev/null +++ b/include/vulkan/vulkan_xlib.h @@ -0,0 +1,66 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_xlib_surface 1 +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" + +typedef VkFlags VkXlibSurfaceCreateFlagsKHR; + +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib_xrandr.h b/include/vulkan/vulkan_xlib_xrandr.h new file mode 100644 index 0000000..117d017 --- /dev/null +++ b/include/vulkan/vulkan_xlib_xrandr.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_XLIB_XRANDR_H_ +#define VULKAN_XLIB_XRANDR_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_EXT_acquire_xlib_display 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" + +typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif |