Diffstat (limited to 'include/vulkan')
-rw-r--r--   include/vulkan/vulkan.cppm                         32
-rw-r--r--   include/vulkan/vulkan.hpp                         243
-rw-r--r--   include/vulkan/vulkan_beta.h                        2
-rw-r--r--   include/vulkan/vulkan_core.h                      240
-rw-r--r--   include/vulkan/vulkan_enums.hpp                    43
-rw-r--r--   include/vulkan/vulkan_extension_inspection.hpp      9
-rw-r--r--   include/vulkan/vulkan_funcs.hpp                   262
-rw-r--r--   include/vulkan/vulkan_handles.hpp                 136
-rw-r--r--   include/vulkan/vulkan_hash.hpp                    225
-rw-r--r--   include/vulkan/vulkan_raii.hpp                    220
-rw-r--r--   include/vulkan/vulkan_static_assertions.hpp       115
-rw-r--r--   include/vulkan/vulkan_structs.hpp                1853
-rw-r--r--   include/vulkan/vulkan_to_string.hpp                49
-rw-r--r--   include/vulkan/vulkan_video.hpp                  2696
14 files changed, 5917 insertions, 208 deletions
diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm
index 3997211..e5bd813 100644
--- a/include/vulkan/vulkan.cppm
+++ b/include/vulkan/vulkan.cppm
@@ -640,6 +640,10 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_pipeline_executable_properties ===
using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR;
+ //=== VK_EXT_host_image_copy ===
+ using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT;
+ using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT;
+
//=== VK_KHR_map_memory2 ===
using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR;
using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR;
@@ -2060,6 +2064,20 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::PipelineInfoEXT;
using VULKAN_HPP_NAMESPACE::PipelineInfoKHR;
+ //=== VK_EXT_host_image_copy ===
+ using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT;
+ using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT;
+ using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT;
+ using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT;
+ using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT;
+ using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
+ using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT;
+ using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT;
+ using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT;
+ using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
+
//=== VK_KHR_map_memory2 ===
using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR;
@@ -2247,9 +2265,7 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_compression_control ===
using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT;
using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT;
- using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT;
- using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
//=== VK_EXT_attachment_feedback_loop_layout ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
@@ -2442,6 +2458,12 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV;
+ //=== VK_NV_device_generated_commands_compute ===
+ using VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV;
+ using VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+ using VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV;
+
//=== VK_NV_linear_color_attachment ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV;
@@ -2762,9 +2784,9 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::StructExtends;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VULKAN_HPP_DYNAMIC_LOADER_TOOL )
+#if defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
using VULKAN_HPP_NAMESPACE::DynamicLoader;
-#endif /*VULKAN_HPP_DYNAMIC_LOADER_TOOL*/
+#endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/
using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic;
@@ -2807,7 +2829,7 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::isObsoletedExtension;
using VULKAN_HPP_NAMESPACE::isPromotedExtension;
- export namespace VULKAN_HPP_RAII_NAMESPACE
+ namespace VULKAN_HPP_RAII_NAMESPACE
{
//======================
//=== RAII HARDCODED ===
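
The vulkan.cppm hunks above export the VK_EXT_host_image_copy enums and structs from the C++ module, move ImageSubresource2EXT / SubresourceLayout2EXT out of the VK_EXT_image_compression_control group, and rename the dynamic-loader guard to VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL. A minimal consumer sketch, not part of the diff, assuming a C++20 modules build in which vulkan.cppm is compiled as the module vulkan_hpp and the default vk namespace is used:

// Sketch only. Assumes the module built from vulkan.cppm is importable as
// "vulkan_hpp" and that VULKAN_HPP_NAMESPACE is the default "vk".
import vulkan_hpp;

int main()
{
  // Newly exported flag type and bit from the first hunk.
  vk::HostImageCopyFlagsEXT flags = vk::HostImageCopyFlagBitsEXT::eMemcpy;

  // Newly exported struct from the second hunk; default construction fills in sType.
  vk::HostImageLayoutTransitionInfoEXT transition{};
  transition.oldLayout = vk::ImageLayout::eUndefined;
  transition.newLayout = vk::ImageLayout::eGeneral;

  return ( flags == vk::HostImageCopyFlagBitsEXT::eMemcpy ) ? 0 : 1;
}
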
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 603be0f..f2e6e75 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 257, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 258, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -212,6 +212,14 @@ constexpr int False = 0;
# define VULKAN_HPP_CONST_OR_CONSTEXPR const
#endif
+#if !defined( VULKAN_HPP_CONSTEXPR_INLINE )
+# if 201606L <= __cpp_inline_variables
+# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline
+# else
+# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR
+# endif
+#endif
+
#if !defined( VULKAN_HPP_NOEXCEPT )
# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
# define VULKAN_HPP_NOEXCEPT
@@ -4904,6 +4912,37 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
}
+ //=== VK_EXT_host_image_copy ===
+
+ VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo );
+ }
+
+ VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo );
+ }
+
+ VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo );
+ }
+
+ VkResult
+ vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions );
+ }
+
+ void vkGetImageSubresourceLayout2EXT( VkDevice device,
+ VkImage image,
+ const VkImageSubresource2EXT * pSubresource,
+ VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
+ }
+
//=== VK_KHR_map_memory2 ===
VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT
@@ -5237,16 +5276,6 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
}
- //=== VK_EXT_image_compression_control ===
-
- void vkGetImageSubresourceLayout2EXT( VkDevice device,
- VkImage image,
- const VkImageSubresource2EXT * pSubresource,
- VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
- {
- return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
- }
-
//=== VK_EXT_device_fault ===
VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT
@@ -5687,6 +5716,26 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride );
}
+ //=== VK_NV_device_generated_commands_compute ===
+
+ void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device,
+ const VkComputePipelineCreateInfo * pCreateInfo,
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements );
+ }
+
+ void
+ vkCmdUpdatePipelineIndirectBuffer( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdUpdatePipelineIndirectBuffer( commandBuffer, pipelineBindPoint, pipeline );
+ }
+
+ VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo );
+ }
+
//=== VK_EXT_extended_dynamic_state3 ===
void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
@@ -6874,32 +6923,32 @@ namespace VULKAN_HPP_NAMESPACE
//=========================================
//=== CONSTEXPR CONSTANTs AND FUNCTIONs ===
//=========================================
- VULKAN_HPP_CONSTEXPR uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED;
- VULKAN_HPP_CONSTEXPR uint32_t False = VK_FALSE;
- VULKAN_HPP_CONSTEXPR float LodClampNone = VK_LOD_CLAMP_NONE;
- VULKAN_HPP_CONSTEXPR uint32_t LuidSize = VK_LUID_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
- VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS;
- VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES;
- VULKAN_HPP_CONSTEXPR uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
- VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL;
- VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT;
- VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED;
- VULKAN_HPP_CONSTEXPR uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT;
- VULKAN_HPP_CONSTEXPR uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS;
- VULKAN_HPP_CONSTEXPR uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS;
- VULKAN_HPP_CONSTEXPR uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
- VULKAN_HPP_CONSTEXPR uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL;
- VULKAN_HPP_CONSTEXPR uint32_t True = VK_TRUE;
- VULKAN_HPP_CONSTEXPR uint32_t UuidSize = VK_UUID_SIZE;
- VULKAN_HPP_CONSTEXPR uint64_t WholeSize = VK_WHOLE_SIZE;
- VULKAN_HPP_CONSTEXPR uint32_t HeaderVersion = VK_HEADER_VERSION;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE;
+ VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION;
template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version )
{
@@ -6949,12 +6998,12 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( ( uint32_t )(version)&0xFFFU );
}
- VULKAN_HPP_CONSTEXPR auto ApiVersion = makeApiVersion( 0, 1, 0, 0 );
- VULKAN_HPP_CONSTEXPR auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 );
- VULKAN_HPP_CONSTEXPR auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 );
- VULKAN_HPP_CONSTEXPR auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 );
- VULKAN_HPP_CONSTEXPR auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 );
- VULKAN_HPP_CONSTEXPR auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION );
} // namespace VULKAN_HPP_NAMESPACE
@@ -10604,6 +10653,48 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_host_image_copy ===
+ template <>
+ struct StructExtends<PhysicalDeviceHostImageCopyFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceHostImageCopyFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceHostImageCopyPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SubresourceHostMemcpySizeEXT, SubresourceLayout2EXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<HostImageCopyDevicePerformanceQueryEXT, ImageFormatProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_shader_atomic_float2 ===
template <>
struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, PhysicalDeviceFeatures2>
@@ -12504,6 +12595,24 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_NV_device_generated_commands_compute ===
+ template <>
+ struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_NV_linear_color_attachment ===
template <>
struct StructExtends<PhysicalDeviceLinearColorAttachmentFeaturesNV, PhysicalDeviceFeatures2>
@@ -14064,6 +14173,13 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+ //=== VK_EXT_host_image_copy ===
+ PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0;
+ PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0;
+ PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0;
+ PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0;
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
//=== VK_KHR_map_memory2 ===
PFN_vkMapMemory2KHR vkMapMemory2KHR = 0;
PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0;
@@ -14149,9 +14265,6 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
- //=== VK_EXT_image_compression_control ===
- PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
-
//=== VK_EXT_device_fault ===
PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
@@ -14288,6 +14401,11 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
+ //=== VK_NV_device_generated_commands_compute ===
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0;
+ PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0;
+ PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0;
+
//=== VK_EXT_extended_dynamic_state3 ===
PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
@@ -15325,6 +15443,13 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableInternalRepresentationsKHR =
PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ //=== VK_EXT_host_image_copy ===
+ vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) );
+ vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) );
+ vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) );
+ vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) );
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) );
vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) );
@@ -15446,9 +15571,6 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
- //=== VK_EXT_image_compression_control ===
- vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
-
//=== VK_EXT_device_fault ===
vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
@@ -15589,6 +15711,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDecompressMemoryIndirectCountNV =
PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) );
+ //=== VK_NV_device_generated_commands_compute ===
+ vkGetPipelineIndirectMemoryRequirementsNV =
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
+ vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBuffer" ) );
+ vkGetPipelineIndirectDeviceAddressNV =
+ PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) );
+
//=== VK_EXT_extended_dynamic_state3 ===
vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
@@ -16331,6 +16460,13 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableInternalRepresentationsKHR =
PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ //=== VK_EXT_host_image_copy ===
+ vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) );
+ vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) );
+ vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
+ vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) );
@@ -16446,9 +16582,6 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
- //=== VK_EXT_image_compression_control ===
- vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
-
//=== VK_EXT_device_fault ===
vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
@@ -16567,6 +16700,12 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+ //=== VK_NV_device_generated_commands_compute ===
+ vkGetPipelineIndirectMemoryRequirementsNV =
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
+ vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) );
+ vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );
+
//=== VK_EXT_extended_dynamic_state3 ===
vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
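
The vulkan.hpp changes wire VK_EXT_host_image_copy and VK_NV_device_generated_commands_compute into DispatchLoaderDynamic, add the StructExtends specializations for their pNext chains, and switch the header constants to the new VULKAN_HPP_CONSTEXPR_INLINE macro. A hedged sketch of what the StructExtends<PhysicalDeviceHostImageCopyFeaturesEXT, PhysicalDeviceFeatures2> specialization enables; the helper name is illustrative and a valid vk::PhysicalDevice plus an initialized default dispatcher are assumed:

#include <vulkan/vulkan.hpp>

// Sketch only: queries the hostImageCopy feature through a StructureChain, which
// is well-formed precisely because of the StructExtends specialization added above.
bool supportsHostImageCopy( vk::PhysicalDevice physicalDevice )
{
  auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                              vk::PhysicalDeviceHostImageCopyFeaturesEXT>();
  return features.get<vk::PhysicalDeviceHostImageCopyFeaturesEXT>().hostImageCopy == vk::True;
}

Per the second specialization, the same feature struct can also be chained into vk::DeviceCreateInfo to enable the feature at device creation.
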
diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h
index d2bcf4d..75fabd4 100644
--- a/include/vulkan/vulkan_beta.h
+++ b/include/vulkan/vulkan_beta.h
@@ -645,7 +645,7 @@ typedef struct VkVideoEncodeH265GopRemainingFrameInfoEXT {
// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_displacement_micromap 1
-#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 1
+#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2
#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap"
typedef enum VkDisplacementMicromapFormatNV {
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index cf85442..b635c86 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 257
+#define VK_HEADER_VERSION 258
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -800,6 +800,16 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003,
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004,
VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = 1000270000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = 1000270001,
+ VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = 1000270002,
+ VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = 1000270003,
+ VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 1000270004,
+ VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = 1000270005,
+ VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = 1000270006,
+ VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = 1000270007,
+ VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = 1000270008,
+ VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = 1000270009,
VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = 1000271000,
VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = 1000271001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000,
@@ -1024,6 +1034,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000,
+ VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001,
+ VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000,
@@ -2291,6 +2304,7 @@ typedef enum VkImageUsageFlagBits {
VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000,
VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100,
+ VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = 0x00400000,
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000,
#endif
@@ -2681,6 +2695,7 @@ typedef enum VkDescriptorSetLayoutCreateFlagBits {
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
@@ -6735,6 +6750,7 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_K
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL;
#ifdef VK_ENABLE_BETA_EXTENSIONS
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL;
#endif
@@ -13874,6 +13890,153 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT(
#endif
+// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls.
+#define VK_EXT_host_image_copy 1
+#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1
+#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy"
+
+typedef enum VkHostImageCopyFlagBitsEXT {
+ VK_HOST_IMAGE_COPY_MEMCPY_EXT = 0x00000001,
+ VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkHostImageCopyFlagBitsEXT;
+typedef VkFlags VkHostImageCopyFlagsEXT;
+typedef struct VkPhysicalDeviceHostImageCopyFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 hostImageCopy;
+} VkPhysicalDeviceHostImageCopyFeaturesEXT;
+
+typedef struct VkPhysicalDeviceHostImageCopyPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t copySrcLayoutCount;
+ VkImageLayout* pCopySrcLayouts;
+ uint32_t copyDstLayoutCount;
+ VkImageLayout* pCopyDstLayouts;
+ uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE];
+ VkBool32 identicalMemoryTypeRequirements;
+} VkPhysicalDeviceHostImageCopyPropertiesEXT;
+
+typedef struct VkMemoryToImageCopyEXT {
+ VkStructureType sType;
+ const void* pNext;
+ const void* pHostPointer;
+ uint32_t memoryRowLength;
+ uint32_t memoryImageHeight;
+ VkImageSubresourceLayers imageSubresource;
+ VkOffset3D imageOffset;
+ VkExtent3D imageExtent;
+} VkMemoryToImageCopyEXT;
+
+typedef struct VkImageToMemoryCopyEXT {
+ VkStructureType sType;
+ const void* pNext;
+ void* pHostPointer;
+ uint32_t memoryRowLength;
+ uint32_t memoryImageHeight;
+ VkImageSubresourceLayers imageSubresource;
+ VkOffset3D imageOffset;
+ VkExtent3D imageExtent;
+} VkImageToMemoryCopyEXT;
+
+typedef struct VkCopyMemoryToImageInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkHostImageCopyFlagsEXT flags;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkMemoryToImageCopyEXT* pRegions;
+} VkCopyMemoryToImageInfoEXT;
+
+typedef struct VkCopyImageToMemoryInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkHostImageCopyFlagsEXT flags;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ uint32_t regionCount;
+ const VkImageToMemoryCopyEXT* pRegions;
+} VkCopyImageToMemoryInfoEXT;
+
+typedef struct VkCopyImageToImageInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkHostImageCopyFlagsEXT flags;
+ VkImage srcImage;
+ VkImageLayout srcImageLayout;
+ VkImage dstImage;
+ VkImageLayout dstImageLayout;
+ uint32_t regionCount;
+ const VkImageCopy2* pRegions;
+} VkCopyImageToImageInfoEXT;
+
+typedef struct VkHostImageLayoutTransitionInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage image;
+ VkImageLayout oldLayout;
+ VkImageLayout newLayout;
+ VkImageSubresourceRange subresourceRange;
+} VkHostImageLayoutTransitionInfoEXT;
+
+typedef struct VkSubresourceHostMemcpySizeEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceSize size;
+} VkSubresourceHostMemcpySizeEXT;
+
+typedef struct VkHostImageCopyDevicePerformanceQueryEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 optimalDeviceAccess;
+ VkBool32 identicalMemoryLayout;
+} VkHostImageCopyDevicePerformanceQueryEXT;
+
+typedef struct VkSubresourceLayout2EXT {
+ VkStructureType sType;
+ void* pNext;
+ VkSubresourceLayout subresourceLayout;
+} VkSubresourceLayout2EXT;
+
+typedef struct VkImageSubresource2EXT {
+ VkStructureType sType;
+ void* pNext;
+ VkImageSubresource imageSubresource;
+} VkImageSubresource2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT(
+ VkDevice device,
+ const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT(
+ VkDevice device,
+ const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT(
+ VkDevice device,
+ const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT(
+ VkDevice device,
+ uint32_t transitionCount,
+ const VkHostImageLayoutTransitionInfoEXT* pTransitions);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
+ VkDevice device,
+ VkImage image,
+ const VkImageSubresource2EXT* pSubresource,
+ VkSubresourceLayout2EXT* pLayout);
+#endif
+
+
// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_shader_atomic_float2 1
#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1
@@ -14022,6 +14185,8 @@ typedef enum VkIndirectCommandsTokenTypeNV {
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
} VkIndirectCommandsTokenTypeNV;
@@ -15057,18 +15222,6 @@ typedef struct VkImageCompressionControlEXT {
VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags;
} VkImageCompressionControlEXT;
-typedef struct VkSubresourceLayout2EXT {
- VkStructureType sType;
- void* pNext;
- VkSubresourceLayout subresourceLayout;
-} VkSubresourceLayout2EXT;
-
-typedef struct VkImageSubresource2EXT {
- VkStructureType sType;
- void* pNext;
- VkImageSubresource imageSubresource;
-} VkImageSubresource2EXT;
-
typedef struct VkImageCompressionPropertiesEXT {
VkStructureType sType;
void* pNext;
@@ -15076,15 +15229,6 @@ typedef struct VkImageCompressionPropertiesEXT {
VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags;
} VkImageCompressionPropertiesEXT;
-typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
-
-#ifndef VK_NO_PROTOTYPES
-VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
- VkDevice device,
- VkImage image,
- const VkImageSubresource2EXT* pSubresource,
- VkSubresourceLayout2EXT* pLayout);
-#endif
// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls.
@@ -16283,6 +16427,58 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV(
#endif
+// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls.
+#define VK_NV_device_generated_commands_compute 1
+#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 1
+#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute"
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 deviceGeneratedCompute;
+ VkBool32 deviceGeneratedComputePipelines;
+ VkBool32 deviceGeneratedComputeCaptureReplay;
+} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+
+typedef struct VkComputePipelineIndirectBufferInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceAddress deviceAddress;
+ VkDeviceSize size;
+ VkDeviceAddress pipelineDeviceAddressCaptureReplay;
+} VkComputePipelineIndirectBufferInfoNV;
+
+typedef struct VkPipelineIndirectDeviceAddressInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineBindPoint pipelineBindPoint;
+ VkPipeline pipeline;
+} VkPipelineIndirectDeviceAddressInfoNV;
+
+typedef struct VkBindPipelineIndirectCommandNV {
+ VkDeviceAddress pipelineAddress;
+} VkBindPipelineIndirectCommandNV;
+
+typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBuffer)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV(
+ VkDevice device,
+ const VkComputePipelineCreateInfo* pCreateInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBuffer(
+ VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV(
+ VkDevice device,
+ const VkPipelineIndirectDeviceAddressInfoNV* pInfo);
+#endif
+
+
// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_linear_color_attachment 1
#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1
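
vulkan_core.h gains the complete C interface for VK_EXT_host_image_copy (flags, copy and transition structs, five entry points) and VK_NV_device_generated_commands_compute, together with their structure types, the VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT usage bit, and the VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT format feature. A hedged sketch of a host-side upload through the new C entry points; the function name and tightly packed pixel data are illustrative, and the extension is assumed to be enabled on a device whose image was created with VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT:

#include <vulkan/vulkan.h>

// Sketch only: uploads host memory into an image without a staging buffer or
// command buffer, using the entry points declared in the hunk above.
VkResult uploadFromHost( VkDevice device, VkImage image, const void * pixels, VkExtent3D extent )
{
  // Host-side layout transition, replacing a pipeline barrier on this path.
  VkHostImageLayoutTransitionInfoEXT transition = {};
  transition.sType            = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT;
  transition.image            = image;
  transition.oldLayout        = VK_IMAGE_LAYOUT_UNDEFINED;
  transition.newLayout        = VK_IMAGE_LAYOUT_GENERAL;
  transition.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
  VkResult result = vkTransitionImageLayoutEXT( device, 1, &transition );
  if ( result != VK_SUCCESS )
    return result;

  // One region; memoryRowLength / memoryImageHeight of 0 mean tightly packed host memory.
  VkMemoryToImageCopyEXT region = {};
  region.sType            = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT;
  region.pHostPointer     = pixels;
  region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
  region.imageExtent      = extent;

  VkCopyMemoryToImageInfoEXT copyInfo = {};
  copyInfo.sType          = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT;
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  return vkCopyMemoryToImageEXT( device, &copyInfo );
}
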
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
index f5b118c..27cb036 100644
--- a/include/vulkan/vulkan_enums.hpp
+++ b/include/vulkan/vulkan_enums.hpp
@@ -801,6 +801,16 @@ namespace VULKAN_HPP_NAMESPACE
ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+ ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT,
+ ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT,
+ eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT,
+ eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT,
+ eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT,
+ eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT,
+ eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT,
+ eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT,
+ eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT,
+ eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT,
eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR,
eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR,
ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
@@ -1049,6 +1059,9 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV,
ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV,
ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV,
+ ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV,
+ eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV,
+ ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV,
ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT,
ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
@@ -1641,6 +1654,7 @@ namespace VULKAN_HPP_NAMESPACE
eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
@@ -1662,7 +1676,8 @@ namespace VULKAN_HPP_NAMESPACE
ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage |
ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment |
ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eVideoDecodeDstKHR | ImageUsageFlagBits::eVideoDecodeSrcKHR |
- ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR
+ ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR |
+ ImageUsageFlagBits::eHostTransferEXT
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | ImageUsageFlagBits::eVideoEncodeDpbKHR
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -2856,6 +2871,7 @@ namespace VULKAN_HPP_NAMESPACE
eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,
eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT,
eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
+ eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV,
eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT
};
@@ -2868,7 +2884,7 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags =
DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR |
DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT |
- DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT;
+ DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT;
};
enum class DescriptorType
@@ -3942,6 +3958,7 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT,
eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR,
eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,
@@ -3977,7 +3994,7 @@ namespace VULKAN_HPP_NAMESPACE
FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat |
FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR |
FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT |
- FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR
+ FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eHostImageTransferEXT
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -5704,6 +5721,22 @@ namespace VULKAN_HPP_NAMESPACE
eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
};
+ //=== VK_EXT_host_image_copy ===
+
+ enum class HostImageCopyFlagBitsEXT : VkHostImageCopyFlagsEXT
+ {
+ eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY_EXT
+ };
+
+ using HostImageCopyFlagsEXT = Flags<HostImageCopyFlagBitsEXT>;
+
+ template <>
+ struct FlagTraits<HostImageCopyFlagBitsEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlagsEXT allFlags = HostImageCopyFlagBitsEXT::eMemcpy;
+ };
+
//=== VK_KHR_map_memory2 ===
enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR
@@ -5781,7 +5814,9 @@ namespace VULKAN_HPP_NAMESPACE
eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV,
- eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV
+ eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV,
+ ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV,
+ eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV
};
enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
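
vulkan_enums.hpp mirrors the C additions: the new StructureType values, ImageUsageFlagBits::eHostTransferEXT, FormatFeatureFlagBits2::eHostImageTransferEXT, HostImageCopyFlagBitsEXT, DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV, and the ePipeline / eDispatch indirect-command token types. A hedged sketch of checking the new format-feature bit before requesting the new usage bit; the helper name is illustrative, and FormatProperties3 (Vulkan 1.3 or VK_KHR_format_feature_flags2) is assumed to be available:

#include <vulkan/vulkan.hpp>

// Sketch only: tests whether a format supports host image transfer for optimal tiling.
bool canHostCopy( vk::PhysicalDevice physicalDevice, vk::Format format )
{
  auto props = physicalDevice.getFormatProperties2<vk::FormatProperties2, vk::FormatProperties3>( format );
  return static_cast<bool>( props.get<vk::FormatProperties3>().optimalTilingFeatures &
                            vk::FormatFeatureFlagBits2::eHostImageTransferEXT );
}

An image intended for host copies would then include vk::ImageUsageFlagBits::eHostTransferEXT in ImageCreateInfo::usage.
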
diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp
index 299fabc..5084b29 100644
--- a/include/vulkan/vulkan_extension_inspection.hpp
+++ b/include/vulkan/vulkan_extension_inspection.hpp
@@ -269,6 +269,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_EXT_extended_dynamic_state",
"VK_KHR_deferred_host_operations",
"VK_KHR_pipeline_executable_properties",
+"VK_EXT_host_image_copy",
"VK_KHR_map_memory2",
"VK_EXT_shader_atomic_float2",
"VK_EXT_swapchain_maintenance1",
@@ -366,6 +367,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_QCOM_fragment_density_map_offset",
"VK_NV_copy_memory_indirect",
"VK_NV_memory_decompression",
+"VK_NV_device_generated_commands_compute",
"VK_NV_linear_color_attachment",
"VK_EXT_image_compression_control_swapchain",
"VK_QCOM_image_processing",
@@ -663,6 +665,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_EXT_index_type_uint8", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_extended_dynamic_state", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_KHR_pipeline_executable_properties", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
+{ "VK_EXT_host_image_copy", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_copy_commands2", "VK_KHR_format_feature_flags2", } } } } },
{ "VK_EXT_shader_atomic_float2", { { "VK_VERSION_1_0", { { "VK_EXT_shader_atomic_float", } } } } },
{ "VK_EXT_surface_maintenance1", { { "VK_VERSION_1_0", { { "VK_KHR_surface", "VK_KHR_get_surface_capabilities2", } } } } },
{ "VK_EXT_swapchain_maintenance1", { { "VK_VERSION_1_0", { { "VK_KHR_swapchain", "VK_EXT_surface_maintenance1", "VK_KHR_get_physical_device_properties2", } } } } },
@@ -758,6 +761,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_QCOM_fragment_density_map_offset", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_EXT_fragment_density_map", } } } } },
{ "VK_NV_copy_memory_indirect", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address", } } } } },
{ "VK_NV_memory_decompression", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address", } } } } },
+{ "VK_NV_device_generated_commands_compute", { { "VK_VERSION_1_0", { { "VK_NV_device_generated_commands", } } } } },
{ "VK_NV_linear_color_attachment", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_GOOGLE_surfaceless_query", { { "VK_VERSION_1_0", { { "VK_KHR_surface", } } } } },
{ "VK_EXT_image_compression_control_swapchain", { { "VK_VERSION_1_0", { { "VK_EXT_image_compression_control", } } } } },
@@ -1432,7 +1436,7 @@ namespace VULKAN_HPP_NAMESPACE
|| ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) ||
( extension == "VK_EXT_shader_atomic_float" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) ||
( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_KHR_deferred_host_operations" ) ||
- ( extension == "VK_KHR_pipeline_executable_properties" ) || ( extension == "VK_KHR_map_memory2" ) ||
+ ( extension == "VK_KHR_pipeline_executable_properties" ) || ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) ||
( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) ||
( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) ||
( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) ||
@@ -1483,7 +1487,8 @@ namespace VULKAN_HPP_NAMESPACE
( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) ||
( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) ||
( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_QCOM_fragment_density_map_offset" ) ||
- ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_linear_color_attachment" ) ||
+ ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) ||
+ ( extension == "VK_NV_device_generated_commands_compute" ) || ( extension == "VK_NV_linear_color_attachment" ) ||
( extension == "VK_EXT_image_compression_control_swapchain" ) || ( extension == "VK_QCOM_image_processing" ) ||
( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) ||
( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) ||
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index 6b43434..b8cee1b 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -18008,6 +18008,146 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+ //=== VK_EXT_host_image_copy ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount,
+ const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>(
+ d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result =
+ d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return structureChain;
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
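// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of this diff): exercising the enhanced-mode Device wrappers
// for VK_EXT_host_image_copy shown above. `device`, `image`, `pixels` and `extent` are assumed to
// be supplied by the caller; the image is assumed to have been created with
// VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT on a device with the hostImageCopy feature enabled, and
// VK_IMAGE_LAYOUT_GENERAL is used here on the assumption that it appears in pCopyDstLayouts
// (verify against PhysicalDeviceHostImageCopyPropertiesEXT). Error handling is omitted; in the
// default configuration these wrappers throw on failure.

#include <vulkan/vulkan.hpp>

void uploadFromHost( vk::Device device, vk::Image image, const void * pixels, vk::Extent3D extent )
{
  // Transition the image on the host, without recording or submitting a command buffer.
  vk::HostImageLayoutTransitionInfoEXT transition;
  transition.image            = image;
  transition.oldLayout        = vk::ImageLayout::eUndefined;
  transition.newLayout        = vk::ImageLayout::eGeneral;
  transition.subresourceRange = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  device.transitionImageLayoutEXT( transition );

  // Copy host memory directly into mip 0 / layer 0 of the image.
  vk::MemoryToImageCopyEXT region;
  region.pHostPointer      = pixels;
  region.memoryRowLength   = 0;  // 0 means tightly packed
  region.memoryImageHeight = 0;
  region.imageSubresource  = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  region.imageExtent       = extent;

  vk::CopyMemoryToImageInfoEXT copyInfo;
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  device.copyMemoryToImageEXT( copyInfo );
}
// ------------------------------------------------------------------------------------------------
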
//=== VK_KHR_map_memory2 ===
template <typename Dispatch>
@@ -19429,54 +19569,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
- //=== VK_EXT_image_compression_control ===
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSubresourceLayout2EXT( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
- VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
- d.vkGetImageSubresourceLayout2EXT( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
-
- return layout;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
- VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-
- VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
- d.vkGetImageSubresourceLayout2EXT( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
-
- return structureChain;
- }
-#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-
//=== VK_EXT_device_fault ===
template <typename Dispatch>
@@ -21107,6 +21199,80 @@ namespace VULKAN_HPP_NAMESPACE
m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
}
+ //=== VK_NV_device_generated_commands_compute ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetPipelineIndirectMemoryRequirementsNV(
+ m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetPipelineIndirectMemoryRequirementsNV(
+ m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+ return memoryRequirements;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetPipelineIndirectMemoryRequirementsNV(
+ m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+ return structureChain;
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdUpdatePipelineIndirectBuffer( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<DeviceAddress>(
+ d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+ Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
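// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of this diff): the VK_NV_device_generated_commands_compute
// wrappers shown above. `device`, `createInfo` and `pipeline` are assumed to come from the caller,
// with the pipeline assumed to have been created from `createInfo` using
// VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV. Allocating and binding the metadata buffer is not
// shown; the function name is hypothetical.

#include <vulkan/vulkan.hpp>

vk::DeviceAddress prepareIndirectComputePipeline( vk::Device                            device,
                                                  vk::ComputePipelineCreateInfo const & createInfo,
                                                  vk::Pipeline                          pipeline )
{
  // How much memory the implementation needs for the pipeline's indirect metadata.
  vk::MemoryRequirements2 requirements = device.getPipelineIndirectMemoryRequirementsNV( createInfo );
  vk::DeviceSize          metadataSize = requirements.memoryRequirements.size;
  (void)metadataSize;  // size the metadata buffer accordingly

  // At record time the metadata would be filled in with
  //   commandBuffer.updatePipelineIndirectBuffer( vk::PipelineBindPoint::eCompute, pipeline );

  // Device address to place into a BindPipelineIndirectCommandNV token.
  vk::PipelineIndirectDeviceAddressInfoNV addressInfo;
  addressInfo.pipelineBindPoint = vk::PipelineBindPoint::eCompute;
  addressInfo.pipeline          = pipeline;
  return device.getPipelineIndirectAddressNV( addressInfo );
}
// ------------------------------------------------------------------------------------------------
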
//=== VK_EXT_extended_dynamic_state3 ===
template <typename Dispatch>
diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp
index 9981076..f0545d9 100644
--- a/include/vulkan/vulkan_handles.hpp
+++ b/include/vulkan/vulkan_handles.hpp
@@ -1141,6 +1141,20 @@ namespace VULKAN_HPP_NAMESPACE
struct PipelineExecutableStatisticKHR;
struct PipelineExecutableInternalRepresentationKHR;
+ //=== VK_EXT_host_image_copy ===
+ struct PhysicalDeviceHostImageCopyFeaturesEXT;
+ struct PhysicalDeviceHostImageCopyPropertiesEXT;
+ struct MemoryToImageCopyEXT;
+ struct ImageToMemoryCopyEXT;
+ struct CopyMemoryToImageInfoEXT;
+ struct CopyImageToMemoryInfoEXT;
+ struct CopyImageToImageInfoEXT;
+ struct HostImageLayoutTransitionInfoEXT;
+ struct SubresourceHostMemcpySizeEXT;
+ struct HostImageCopyDevicePerformanceQueryEXT;
+ struct SubresourceLayout2EXT;
+ struct ImageSubresource2EXT;
+
//=== VK_KHR_map_memory2 ===
struct MemoryMapInfoKHR;
struct MemoryUnmapInfoKHR;
@@ -1328,8 +1342,6 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_compression_control ===
struct PhysicalDeviceImageCompressionControlFeaturesEXT;
struct ImageCompressionControlEXT;
- struct SubresourceLayout2EXT;
- struct ImageSubresource2EXT;
struct ImageCompressionPropertiesEXT;
//=== VK_EXT_attachment_feedback_loop_layout ===
@@ -1523,6 +1535,12 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceMemoryDecompressionFeaturesNV;
struct PhysicalDeviceMemoryDecompressionPropertiesNV;
+ //=== VK_NV_device_generated_commands_compute ===
+ struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+ struct ComputePipelineIndirectBufferInfoNV;
+ struct PipelineIndirectDeviceAddressInfoNV;
+ struct BindPipelineIndirectCommandNV;
+
//=== VK_NV_linear_color_attachment ===
struct PhysicalDeviceLinearColorAttachmentFeaturesNV;
@@ -5945,6 +5963,13 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t stride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_device_generated_commands_compute ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_EXT_extended_dynamic_state3 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -11874,6 +11899,67 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+ //=== VK_EXT_host_image_copy ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount,
+ const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
//=== VK_KHR_map_memory2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -12187,26 +12273,6 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
- //=== VK_EXT_image_compression_control ===
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
- getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
- getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-
//=== VK_EXT_device_fault ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -12616,6 +12682,32 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+ //=== VK_NV_device_generated_commands_compute ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
//=== VK_EXT_shader_module_identifier ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
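// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of this diff): checking the feature struct declared above
// before relying on the new VK_EXT_host_image_copy entry points. Assumes `physicalDevice` is a
// vk::PhysicalDevice and that Vulkan 1.1 or VK_KHR_get_physical_device_properties2 is available;
// the function name is hypothetical.

#include <vulkan/vulkan.hpp>

bool supportsHostImageCopy( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceHostImageCopyFeaturesEXT>();
  return chain.get<vk::PhysicalDeviceHostImageCopyFeaturesEXT>().hostImageCopy == VK_TRUE;
}
// ------------------------------------------------------------------------------------------------
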
diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp
index 7cec116..f7305e7 100644
--- a/include/vulkan/vulkan_hash.hpp
+++ b/include/vulkan/vulkan_hash.hpp
@@ -1374,6 +1374,17 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const & bindPipelineIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, bindPipelineIndirectCommandNV.pipelineAddress );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
@@ -2274,6 +2285,21 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const & computePipelineIndirectBufferInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.deviceAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.size );
+ VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pipelineDeviceAddressCaptureReplay );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT
@@ -2476,6 +2502,60 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT const & copyImageToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImage );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImageLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImage );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImageLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.regionCount );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pRegions );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT const & imageToMemoryCopyEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pHostPointer );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryRowLength );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryImageHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageSubresource );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageExtent );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT const & copyImageToMemoryInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImage );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImageLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.regionCount );
+ VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pRegions );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
@@ -2505,6 +2585,41 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT const & memoryToImageCopyEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pHostPointer );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryRowLength );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryImageHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageSubresource );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageExtent );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT const & copyMemoryToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImage );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImageLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.regionCount );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pRegions );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT
@@ -5148,6 +5263,37 @@ namespace std
}
};
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT const & hostImageCopyDevicePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.optimalDeviceAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.identicalMemoryLayout );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT const & hostImageLayoutTransitionInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.image );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.oldLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.newLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.subresourceRange );
+ return seed;
+ }
+ };
+
# if defined( VK_USE_PLATFORM_IOS_MVK )
template <>
struct hash<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>
@@ -7523,6 +7669,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &
+ physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedCompute );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputePipelines );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputeCaptureReplay );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
{
std::size_t operator()(
@@ -8250,6 +8412,42 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT const & physicalDeviceHostImageCopyFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.hostImageCopy );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT const & physicalDeviceHostImageCopyPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copySrcLayoutCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopySrcLayouts );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copyDstLayoutCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopyDstLayouts );
+ for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.optimalTilingLayoutUUID[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.identicalMemoryTypeRequirements );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT
@@ -11487,6 +11685,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const & pipelineIndirectDeviceAddressInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipelineBindPoint );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipeline );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT
@@ -13354,6 +13566,19 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT const & subresourceHostMemcpySizeEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.size );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT
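// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of this diff): the std::hash specializations added above let
// the new structs participate in hash-based lookups and cache keys. The function name is
// hypothetical.

#include <vulkan/vulkan_hash.hpp>

std::size_t hashBindCommand( vk::DeviceAddress pipelineAddress )
{
  vk::BindPipelineIndirectCommandNV command( pipelineAddress );
  return std::hash<vk::BindPipelineIndirectCommandNV>{}( command );
}
// ------------------------------------------------------------------------------------------------
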
diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp
index 9c1939c..df3353f 100644
--- a/include/vulkan/vulkan_raii.hpp
+++ b/include/vulkan/vulkan_raii.hpp
@@ -1331,6 +1331,13 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableInternalRepresentationsKHR =
PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ //=== VK_EXT_host_image_copy ===
+ vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) );
+ vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) );
+ vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
+ vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) );
@@ -1446,9 +1453,6 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
- //=== VK_EXT_image_compression_control ===
- vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
-
//=== VK_EXT_device_fault ===
vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
@@ -1569,6 +1573,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDecompressMemoryIndirectCountNV =
PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+ //=== VK_NV_device_generated_commands_compute ===
+ vkGetPipelineIndirectMemoryRequirementsNV =
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
+ vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) );
+ vkGetPipelineIndirectDeviceAddressNV =
+ PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );
+
//=== VK_EXT_extended_dynamic_state3 ===
vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
@@ -2195,6 +2206,13 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+ //=== VK_EXT_host_image_copy ===
+ PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0;
+ PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0;
+ PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0;
+ PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0;
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
//=== VK_KHR_map_memory2 ===
PFN_vkMapMemory2KHR vkMapMemory2KHR = 0;
PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0;
@@ -2274,9 +2292,6 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
- //=== VK_EXT_image_compression_control ===
- PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
-
//=== VK_EXT_device_fault ===
PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
@@ -2386,6 +2401,11 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
+ //=== VK_NV_device_generated_commands_compute ===
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0;
+ PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0;
+ PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0;
+
//=== VK_EXT_extended_dynamic_state3 ===
PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
@@ -4022,6 +4042,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
+ //=== VK_EXT_host_image_copy ===
+
+ void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const;
+
+ void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const;
+
+ void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const;
+
+ void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const;
+
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const;
@@ -4209,6 +4239,18 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(
const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_device_generated_commands_compute ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
+ getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
@@ -5883,6 +5925,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_device_generated_commands_compute ===
+
+ void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_EXT_extended_dynamic_state3 ===
void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT;
@@ -8103,7 +8150,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const;
- //=== VK_EXT_image_compression_control ===
+ //=== VK_EXT_host_image_copy ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
@@ -18150,6 +18197,77 @@ namespace VULKAN_HPP_NAMESPACE
return internalRepresentations;
}
+ //=== VK_EXT_host_image_copy ===
+
+ VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy>" );
+
+ VkResult result = getDispatcher()->vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
+ }
+
+ VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy>" );
+
+ VkResult result = getDispatcher()->vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
+ }
+
+ VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy>" );
+
+ VkResult result = getDispatcher()->vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
+ }
+
+ VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy>" );
+
+ VkResult result = getDispatcher()->vkTransitionImageLayoutEXT(
+ static_cast<VkDevice>( m_device ), transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return structureChain;
+ }
+
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const
@@ -18837,40 +18955,6 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
- //=== VK_EXT_image_compression_control ===
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
- Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
- "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_image_compression_control>" );
-
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
- getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
-
- return layout;
- }
-
- template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
- Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
- "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_image_compression_control>" );
-
- VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
- getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
-
- return structureChain;
- }
-
//=== VK_EXT_device_fault ===
VULKAN_HPP_NODISCARD
@@ -19650,6 +19734,60 @@ namespace VULKAN_HPP_NAMESPACE
stride );
}
+ //=== VK_NV_device_generated_commands_compute ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
+ "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
+
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+ return memoryRequirements;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
+ "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+ return structureChain;
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBuffer &&
+ "Function <vkCmdUpdatePipelineIndirectBuffer> requires <VK_NV_device_generated_commands_compute>" );
+
+ getDispatcher()->vkCmdUpdatePipelineIndirectBuffer(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+ Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV &&
+ "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" );
+
+ VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+ }
+
//=== VK_EXT_extended_dynamic_state3 ===
VULKAN_HPP_INLINE void
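// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of this diff): the vk::raii::Image overloads shown above,
// chaining SubresourceHostMemcpySizeEXT to ask how many bytes a host image-to-memory copy of a
// subresource would write. Assumes `image` lives on a device with VK_EXT_host_image_copy enabled
// and that SubresourceHostMemcpySizeEXT may extend SubresourceLayout2EXT in a StructureChain; the
// function name is hypothetical.

#include <vulkan/vulkan_raii.hpp>

vk::DeviceSize hostCopySizeOfMipZero( vk::raii::Image const & image )
{
  vk::ImageSubresource2EXT subresource;
  subresource.imageSubresource = vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 );

  auto chain = image.getSubresourceLayout2EXT<vk::SubresourceLayout2EXT, vk::SubresourceHostMemcpySizeEXT>( subresource );
  return chain.get<vk::SubresourceHostMemcpySizeEXT>().size;
}
// ------------------------------------------------------------------------------------------------
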
diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp
index fb0b523..ba79591 100644
--- a/include/vulkan/vulkan_static_assertions.hpp
+++ b/include/vulkan/vulkan_static_assertions.hpp
@@ -4560,6 +4560,80 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Pipeline
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
"PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
+//=== VK_EXT_host_image_copy ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value,
+ "PhysicalDeviceHostImageCopyFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value,
+ "PhysicalDeviceHostImageCopyPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT ) == sizeof( VkMemoryToImageCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value,
+ "MemoryToImageCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT ) == sizeof( VkImageToMemoryCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value,
+ "ImageToMemoryCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT ) == sizeof( VkCopyMemoryToImageInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value,
+ "CopyMemoryToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT ) == sizeof( VkCopyImageToMemoryInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value,
+ "CopyImageToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT ) == sizeof( VkCopyImageToImageInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value,
+ "CopyImageToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT ) == sizeof( VkHostImageLayoutTransitionInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value,
+ "HostImageLayoutTransitionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT ) == sizeof( VkSubresourceHostMemcpySizeEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value,
+ "SubresourceHostMemcpySizeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT ) == sizeof( VkHostImageCopyDevicePerformanceQueryEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
+ "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value,
+ "SubresourceLayout2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value,
+ "ImageSubresource2EXT is not nothrow_move_constructible!" );
+
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" );
@@ -5427,17 +5501,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCom
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value,
"ImageCompressionControlEXT is not nothrow_move_constructible!" );
-VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ),
- "struct and wrapper have different size!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value,
- "SubresourceLayout2EXT is not nothrow_move_constructible!" );
-
-VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value,
- "ImageSubresource2EXT is not nothrow_move_constructible!" );
-
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
@@ -6222,6 +6285,36 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>::value,
"PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" );
+//=== VK_NV_device_generated_commands_compute ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>::value,
+ "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+ "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+ "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value,
+ "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" );
+
//=== VK_NV_linear_color_attachment ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) ==
diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp
index ab71413..97dbee4 100644
--- a/include/vulkan/vulkan_structs.hpp
+++ b/include/vulkan/vulkan_structs.hpp
@@ -8173,6 +8173,84 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
+ struct BindPipelineIndirectCommandNV
+ {
+ using NativeType = VkBindPipelineIndirectCommandNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pipelineAddress( pipelineAddress_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : BindPipelineIndirectCommandNV( *reinterpret_cast<BindPipelineIndirectCommandNV const *>( &rhs ) )
+ {
+ }
+
+ BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineAddress = pipelineAddress_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBindPipelineIndirectCommandNV *>( this );
+ }
+
+ operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindPipelineIndirectCommandNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( pipelineAddress );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default;
+#else
+ bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( pipelineAddress == rhs.pipelineAddress );
+# endif
+ }
+
+ bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {};
+ };
+
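// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// BindPipelineIndirectCommandNV is the token VK_NV_device_generated_commands_compute reads from the
// indirect commands stream to select a compute pipeline by device address. A minimal host-side fill,
// assuming `pipelineAddress` was obtained via vkGetPipelineIndirectDeviceAddressNV and `mappedStream`
// points into a mapped indirect-commands buffer:

#include <cstring>
#include <vulkan/vulkan.hpp>

// Write one pipeline-bind token at byte offset `tokenOffset` of the mapped stream.
void writeBindPipelineToken( void * mappedStream, vk::DeviceSize tokenOffset, vk::DeviceAddress pipelineAddress )
{
  vk::BindPipelineIndirectCommandNV token{};  // single member: pipelineAddress
  token.setPipelineAddress( pipelineAddress );
  std::memcpy( static_cast<char *>( mappedStream ) + tokenOffset, &token, sizeof( token ) );
}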
struct BindShaderGroupIndirectCommandNV
{
using NativeType = VkBindShaderGroupIndirectCommandNV;
@@ -15440,6 +15518,127 @@ namespace VULKAN_HPP_NAMESPACE
using Type = ComputePipelineCreateInfo;
};
+ struct ComputePipelineIndirectBufferInfoNV
+ {
+ using NativeType = VkComputePipelineIndirectBufferInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceAddress( deviceAddress_ )
+ , size( size_ )
+ , pipelineDeviceAddressCaptureReplay( pipelineDeviceAddressCaptureReplay_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast<ComputePipelineIndirectBufferInfoNV const *>( &rhs ) )
+ {
+ }
+
+ ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceAddress = deviceAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV &
+ setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkComputePipelineIndirectBufferInfoNV *>( this );
+ }
+
+ operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkComputePipelineIndirectBufferInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default;
+#else
+ bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) &&
+ ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay );
+# endif
+ }
+
+ bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eComputePipelineIndirectBufferInfoNV>
+ {
+ using Type = ComputePipelineIndirectBufferInfoNV;
+ };
+
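// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// ComputePipelineIndirectBufferInfoNV extends VkComputePipelineCreateInfo and tells the driver where to
// store the pipeline metadata used for indirect binding. The eIndirectBindableNV flag and the exact
// chaining rules below are assumptions based on the VK_NV_device_generated_commands_compute spec.

#include <vulkan/vulkan.hpp>

// Create a compute pipeline whose indirect-bind metadata lives at `metadataAddress`.
vk::Pipeline createIndirectlyBindableComputePipeline( vk::Device device,
                                                      vk::PipelineShaderStageCreateInfo stage,
                                                      vk::PipelineLayout layout,
                                                      vk::DeviceAddress metadataAddress,
                                                      vk::DeviceSize metadataSize )
{
  vk::ComputePipelineIndirectBufferInfoNV indirectInfo{};
  indirectInfo.setDeviceAddress( metadataAddress ).setSize( metadataSize );

  vk::ComputePipelineCreateInfo createInfo{};
  createInfo.setFlags( vk::PipelineCreateFlagBits::eIndirectBindableNV )
            .setStage( stage )
            .setLayout( layout )
            .setPNext( &indirectInfo );

  // .value: pipeline creation may also report ePipelineCompileRequired, so a ResultValue is returned
  return device.createComputePipeline( nullptr, createInfo ).value;
}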
struct ConditionalRenderingBeginInfoEXT
{
using NativeType = VkConditionalRenderingBeginInfoEXT;
@@ -17284,6 +17483,515 @@ namespace VULKAN_HPP_NAMESPACE
};
using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
+ struct CopyImageToImageInfoEXT
+ {
+ using NativeType = VkCopyImageToImageInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , srcImage( srcImage_ )
+ , srcImageLayout( srcImageLayout_ )
+ , dstImage( dstImage_ )
+ , dstImageLayout( dstImageLayout_ )
+ , regionCount( regionCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageToImageInfoEXT( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyImageToImageInfoEXT( *reinterpret_cast<CopyImageToImageInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , srcImage( srcImage_ )
+ , srcImageLayout( srcImageLayout_ )
+ , dstImage( dstImage_ )
+ , dstImageLayout( dstImageLayout_ )
+ , regionCount( static_cast<uint32_t>( regions_.size() ) )
+ , pRegions( regions_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ CopyImageToImageInfoEXT & operator=( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyImageToImageInfoEXT & operator=( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyImageToImageInfoEXT &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyImageToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageToImageInfoEXT *>( this );
+ }
+
+ operator VkCopyImageToImageInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageToImageInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CopyImageToImageInfoEXT const & ) const = default;
+#else
+ bool operator==( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) &&
+ ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
+ ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+ }
+
+ bool operator!=( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageToImageInfoEXT>
+ {
+ using Type = CopyImageToImageInfoEXT;
+ };
+
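// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// The ArrayProxy constructor above lets a single ImageCopy2 region be passed directly. The matching
// device-level call, copyImageToImageEXT, comes from vulkan_funcs.hpp (also touched by this change);
// this assumes VK_EXT_host_image_copy is enabled, both images were created with host-transfer usage,
// and the dispatcher has loaded the extension entry points.

#include <vulkan/vulkan.hpp>

// Copy mip 0 of one host-copyable image into another, entirely on the host.
void hostCopyImageToImage( vk::Device device, vk::Image src, vk::Image dst, vk::Extent3D extent )
{
  vk::ImageCopy2 region{};
  region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setExtent( extent );

  vk::CopyImageToImageInfoEXT copyInfo( {}, src, vk::ImageLayout::eGeneral, dst, vk::ImageLayout::eGeneral, region );
  device.copyImageToImageEXT( copyInfo );  // enhanced mode: throws on failure
}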
+ struct ImageToMemoryCopyEXT
+ {
+ using NativeType = VkImageToMemoryCopyEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopyEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( void * pHostPointer_ = {},
+ uint32_t memoryRowLength_ = {},
+ uint32_t memoryImageHeight_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pHostPointer( pHostPointer_ )
+ , memoryRowLength( memoryRowLength_ )
+ , memoryImageHeight( memoryImageHeight_ )
+ , imageSubresource( imageSubresource_ )
+ , imageOffset( imageOffset_ )
+ , imageExtent( imageExtent_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageToMemoryCopyEXT( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageToMemoryCopyEXT( *reinterpret_cast<ImageToMemoryCopyEXT const *>( &rhs ) )
+ {
+ }
+
+ ImageToMemoryCopyEXT & operator=( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageToMemoryCopyEXT & operator=( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pHostPointer = pHostPointer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryRowLength = memoryRowLength_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryImageHeight = memoryImageHeight_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT &
+ setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageSubresource = imageSubresource_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageOffset = imageOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImageToMemoryCopyEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageToMemoryCopyEXT *>( this );
+ }
+
+ operator VkImageToMemoryCopyEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageToMemoryCopyEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
+ VULKAN_HPP_NAMESPACE::Offset3D const &,
+ VULKAN_HPP_NAMESPACE::Extent3D const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageToMemoryCopyEXT const & ) const = default;
+#else
+ bool operator==( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) &&
+ ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
+ ( imageExtent == rhs.imageExtent );
+# endif
+ }
+
+ bool operator!=( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopyEXT;
+ const void * pNext = {};
+ void * pHostPointer = {};
+ uint32_t memoryRowLength = {};
+ uint32_t memoryImageHeight = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageToMemoryCopyEXT>
+ {
+ using Type = ImageToMemoryCopyEXT;
+ };
+
+ struct CopyImageToMemoryInfoEXT
+ {
+ using NativeType = VkCopyImageToMemoryInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , srcImage( srcImage_ )
+ , srcImageLayout( srcImageLayout_ )
+ , regionCount( regionCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageToMemoryInfoEXT( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyImageToMemoryInfoEXT( *reinterpret_cast<CopyImageToMemoryInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , srcImage( srcImage_ )
+ , srcImageLayout( srcImageLayout_ )
+ , regionCount( static_cast<uint32_t>( regions_.size() ) )
+ , pRegions( regions_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ CopyImageToMemoryInfoEXT & operator=( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyImageToMemoryInfoEXT & operator=( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyImageToMemoryInfoEXT &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyImageToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( this );
+ }
+
+ operator VkCopyImageToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageToMemoryInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CopyImageToMemoryInfoEXT const & ) const = default;
+#else
+ bool operator==( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) &&
+ ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+ }
+
+ bool operator!=( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageToMemoryInfoEXT>
+ {
+ using Type = CopyImageToMemoryInfoEXT;
+ };
+
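// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// ImageToMemoryCopyEXT describes one region of an image-to-host-memory copy, and CopyImageToMemoryInfoEXT
// bundles the regions for the copyImageToMemoryEXT entry point added to vulkan_funcs.hpp by this change.
// Assumes the extension and its feature are enabled and the image allows host transfers.

#include <vulkan/vulkan.hpp>

// Read mip 0 of a host-copyable image back into `hostPixels` (tightly packed).
void hostReadbackImage( vk::Device device, vk::Image image, vk::Extent3D extent, void * hostPixels )
{
  vk::ImageToMemoryCopyEXT region{};
  region.setPHostPointer( hostPixels )
        .setMemoryRowLength( 0 )    // 0 means tightly packed rows
        .setMemoryImageHeight( 0 )  // 0 means tightly packed slices
        .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setImageExtent( extent );

  vk::CopyImageToMemoryInfoEXT copyInfo( {}, image, vk::ImageLayout::eGeneral, region );
  device.copyImageToMemoryEXT( copyInfo );
}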
struct CopyMemoryIndirectCommandNV
{
using NativeType = VkCopyMemoryIndirectCommandNV;
@@ -17614,6 +18322,324 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
+ struct MemoryToImageCopyEXT
+ {
+ using NativeType = VkMemoryToImageCopyEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopyEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( const void * pHostPointer_ = {},
+ uint32_t memoryRowLength_ = {},
+ uint32_t memoryImageHeight_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pHostPointer( pHostPointer_ )
+ , memoryRowLength( memoryRowLength_ )
+ , memoryImageHeight( memoryImageHeight_ )
+ , imageSubresource( imageSubresource_ )
+ , imageOffset( imageOffset_ )
+ , imageExtent( imageExtent_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryToImageCopyEXT( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MemoryToImageCopyEXT( *reinterpret_cast<MemoryToImageCopyEXT const *>( &rhs ) )
+ {
+ }
+
+ MemoryToImageCopyEXT & operator=( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MemoryToImageCopyEXT & operator=( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pHostPointer = pHostPointer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryRowLength = memoryRowLength_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryImageHeight = memoryImageHeight_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT &
+ setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageSubresource = imageSubresource_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageOffset = imageOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageExtent = imageExtent_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMemoryToImageCopyEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryToImageCopyEXT *>( this );
+ }
+
+ operator VkMemoryToImageCopyEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryToImageCopyEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ const void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
+ VULKAN_HPP_NAMESPACE::Offset3D const &,
+ VULKAN_HPP_NAMESPACE::Extent3D const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MemoryToImageCopyEXT const & ) const = default;
+#else
+ bool operator==( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) &&
+ ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
+ ( imageExtent == rhs.imageExtent );
+# endif
+ }
+
+ bool operator!=( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopyEXT;
+ const void * pNext = {};
+ const void * pHostPointer = {};
+ uint32_t memoryRowLength = {};
+ uint32_t memoryImageHeight = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryToImageCopyEXT>
+ {
+ using Type = MemoryToImageCopyEXT;
+ };
+
+ struct CopyMemoryToImageInfoEXT
+ {
+ using NativeType = VkCopyMemoryToImageInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , dstImage( dstImage_ )
+ , dstImageLayout( dstImageLayout_ )
+ , regionCount( regionCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToImageInfoEXT( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMemoryToImageInfoEXT( *reinterpret_cast<CopyMemoryToImageInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , dstImage( dstImage_ )
+ , dstImageLayout( dstImageLayout_ )
+ , regionCount( static_cast<uint32_t>( regions_.size() ) )
+ , pRegions( regions_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ CopyMemoryToImageInfoEXT & operator=( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMemoryToImageInfoEXT & operator=( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CopyMemoryToImageInfoEXT &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMemoryToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( this );
+ }
+
+ operator VkCopyMemoryToImageInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMemoryToImageInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CopyMemoryToImageInfoEXT const & ) const = default;
+#else
+ bool operator==( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) &&
+ ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+ }
+
+ bool operator!=( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMemoryToImageInfoEXT>
+ {
+ using Type = CopyMemoryToImageInfoEXT;
+ };
+
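// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// The mirror direction: MemoryToImageCopyEXT points at source host memory and CopyMemoryToImageInfoEXT
// feeds copyMemoryToImageEXT (added to vulkan_funcs.hpp in this change). The setRegions setter shown
// above fills regionCount/pRegions from an ArrayProxy in one call.

#include <vulkan/vulkan.hpp>

// Upload tightly packed host pixels into mip 0 of a host-copyable image.
void hostUploadImage( vk::Device device, vk::Image image, vk::Extent3D extent, const void * hostPixels )
{
  vk::MemoryToImageCopyEXT region{};
  region.setPHostPointer( hostPixels )
        .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setImageExtent( extent );  // memoryRowLength / memoryImageHeight stay 0: tightly packed

  vk::CopyMemoryToImageInfoEXT copyInfo{};
  copyInfo.setDstImage( image ).setDstImageLayout( vk::ImageLayout::eGeneral ).setRegions( region );
  device.copyMemoryToImageEXT( copyInfo );
}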
struct CopyMemoryToMicromapInfoEXT
{
using NativeType = VkCopyMemoryToMicromapInfoEXT;
@@ -38861,6 +39887,224 @@ namespace VULKAN_HPP_NAMESPACE
using Type = HeadlessSurfaceCreateInfoEXT;
};
+ struct HostImageCopyDevicePerformanceQueryEXT
+ {
+ using NativeType = VkHostImageCopyDevicePerformanceQueryEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQueryEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , optimalDeviceAccess( optimalDeviceAccess_ )
+ , identicalMemoryLayout( identicalMemoryLayout_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ HostImageCopyDevicePerformanceQueryEXT( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : HostImageCopyDevicePerformanceQueryEXT( *reinterpret_cast<HostImageCopyDevicePerformanceQueryEXT const *>( &rhs ) )
+ {
+ }
+
+ HostImageCopyDevicePerformanceQueryEXT & operator=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ HostImageCopyDevicePerformanceQueryEXT & operator=( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkHostImageCopyDevicePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkHostImageCopyDevicePerformanceQueryEXT *>( this );
+ }
+
+ operator VkHostImageCopyDevicePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkHostImageCopyDevicePerformanceQueryEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( HostImageCopyDevicePerformanceQueryEXT const & ) const = default;
+#else
+ bool operator==( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) &&
+ ( identicalMemoryLayout == rhs.identicalMemoryLayout );
+# endif
+ }
+
+ bool operator!=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQueryEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eHostImageCopyDevicePerformanceQueryEXT>
+ {
+ using Type = HostImageCopyDevicePerformanceQueryEXT;
+ };
+
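// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// HostImageCopyDevicePerformanceQueryEXT is an output-only struct; per the extension spec it is chained
// into the pNext of ImageFormatProperties2 when querying a format with host-transfer usage, so an
// application can decide whether a host-copyable layout sacrifices device access. The eHostTransferEXT
// usage bit and the structure-chain compatibility used below are assumptions taken from that spec.

#include <vulkan/vulkan.hpp>

// Return true if host-copyable images of `format` keep optimal device access.
bool hostCopyKeepsOptimalAccess( vk::PhysicalDevice physicalDevice, vk::Format format )
{
  vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
  formatInfo.setFormat( format )
            .setType( vk::ImageType::e2D )
            .setTiling( vk::ImageTiling::eOptimal )
            .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eHostTransferEXT );

  auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
                                                        vk::HostImageCopyDevicePerformanceQueryEXT>( formatInfo );
  return chain.get<vk::HostImageCopyDevicePerformanceQueryEXT>().optimalDeviceAccess == VK_TRUE;
}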
+ struct HostImageLayoutTransitionInfoEXT
+ {
+ using NativeType = VkHostImageLayoutTransitionInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
+ , oldLayout( oldLayout_ )
+ , newLayout( newLayout_ )
+ , subresourceRange( subresourceRange_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ HostImageLayoutTransitionInfoEXT( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : HostImageLayoutTransitionInfoEXT( *reinterpret_cast<HostImageLayoutTransitionInfoEXT const *>( &rhs ) )
+ {
+ }
+
+ HostImageLayoutTransitionInfoEXT & operator=( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ HostImageLayoutTransitionInfoEXT & operator=( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ oldLayout = oldLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ newLayout = newLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT &
+ setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkHostImageLayoutTransitionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( this );
+ }
+
+ operator VkHostImageLayoutTransitionInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkHostImageLayoutTransitionInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout const &,
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( HostImageLayoutTransitionInfoEXT const & ) const = default;
+#else
+ bool operator==( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) &&
+ ( subresourceRange == rhs.subresourceRange );
+# endif
+ }
+
+ bool operator!=( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eHostImageLayoutTransitionInfoEXT>
+ {
+ using Type = HostImageLayoutTransitionInfoEXT;
+ };
+
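// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// HostImageLayoutTransitionInfoEXT lets the host change an image layout without recording a barrier;
// the transitionImageLayoutEXT member used below is among the entry points this change adds to
// vulkan_funcs.hpp and takes an ArrayProxy of transitions.

#include <vulkan/vulkan.hpp>

// Move a freshly created, host-copyable image from UNDEFINED to GENERAL on the host.
void hostTransitionToGeneral( vk::Device device, vk::Image image )
{
  vk::HostImageLayoutTransitionInfoEXT transition{};
  transition.setImage( image )
            .setOldLayout( vk::ImageLayout::eUndefined )
            .setNewLayout( vk::ImageLayout::eGeneral )
            .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS } );

  device.transitionImageLayoutEXT( transition );  // single-element ArrayProxy
}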
#if defined( VK_USE_PLATFORM_IOS_MVK )
struct IOSSurfaceCreateInfoMVK
{
@@ -56538,6 +57782,133 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
};
+ struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV
+ {
+ using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceGeneratedCompute( deviceGeneratedCompute_ )
+ , deviceGeneratedComputePipelines( deviceGeneratedComputePipelines_ )
+ , deviceGeneratedComputeCaptureReplay( deviceGeneratedComputeCaptureReplay_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
+ operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
+ operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
+ setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceGeneratedCompute = deviceGeneratedCompute_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
+ setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
+ setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) &&
+ ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) &&
+ ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
+ {
+ using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+ };
+
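// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// Like other PhysicalDevice*FeaturesNV structs, this one is queried through a getFeatures2 structure
// chain and, if desired, chained into DeviceCreateInfo::pNext to enable the features at device creation.

#include <vulkan/vulkan.hpp>

// Report whether device-generated compute dispatches and pipeline binds are both supported.
bool supportsDeviceGeneratedComputePipelines( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                           vk::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>();
  auto const & dgc = chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>();
  return dgc.deviceGeneratedCompute && dgc.deviceGeneratedComputePipelines;
}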
struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
@@ -61591,6 +62962,295 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+ struct PhysicalDeviceHostImageCopyFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , hostImageCopy( hostImageCopy_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceHostImageCopyFeaturesEXT( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceHostImageCopyFeaturesEXT( *reinterpret_cast<PhysicalDeviceHostImageCopyFeaturesEXT const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceHostImageCopyFeaturesEXT & operator=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceHostImageCopyFeaturesEXT & operator=( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hostImageCopy = hostImageCopy_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceHostImageCopyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceHostImageCopyFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceHostImageCopyFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, hostImageCopy );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceHostImageCopyFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT>
+ {
+ using Type = PhysicalDeviceHostImageCopyFeaturesEXT;
+ };
+
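// Usage sketch (editorial illustration only, not part of the generated header or this patch):
// Enabling hostImageCopy follows the usual features2 pattern: chain the struct into DeviceCreateInfo::pNext
// with the member set to VK_TRUE, and list VK_EXT_host_image_copy among the enabled extensions.

#include <vector>
#include <vulkan/vulkan.hpp>

// Create a device with host image copy enabled; `extensions` must contain VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME.
vk::Device createDeviceWithHostImageCopy( vk::PhysicalDevice physicalDevice,
                                          vk::DeviceQueueCreateInfo queueInfo,
                                          std::vector<const char *> const & extensions )
{
  vk::PhysicalDeviceHostImageCopyFeaturesEXT hostImageCopyFeatures{};
  hostImageCopyFeatures.setHostImageCopy( VK_TRUE );

  vk::DeviceCreateInfo createInfo{};
  createInfo.setQueueCreateInfos( queueInfo ).setPEnabledExtensionNames( extensions ).setPNext( &hostImageCopyFeatures );

  return physicalDevice.createDevice( createInfo );
}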
+ struct PhysicalDeviceHostImageCopyPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceHostImageCopyPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( uint32_t copySrcLayoutCount_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {},
+ uint32_t copyDstLayoutCount_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , copySrcLayoutCount( copySrcLayoutCount_ )
+ , pCopySrcLayouts( pCopySrcLayouts_ )
+ , copyDstLayoutCount( copyDstLayoutCount_ )
+ , pCopyDstLayouts( pCopyDstLayouts_ )
+ , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ )
+ , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceHostImageCopyPropertiesEXT( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceHostImageCopyPropertiesEXT( *reinterpret_cast<PhysicalDeviceHostImageCopyPropertiesEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PhysicalDeviceHostImageCopyPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copySrcLayouts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copyDstLayouts_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {},
+ void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , copySrcLayoutCount( static_cast<uint32_t>( copySrcLayouts_.size() ) )
+ , pCopySrcLayouts( copySrcLayouts_.data() )
+ , copyDstLayoutCount( static_cast<uint32_t>( copyDstLayouts_.size() ) )
+ , pCopyDstLayouts( copyDstLayouts_.data() )
+ , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ )
+ , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ PhysicalDeviceHostImageCopyPropertiesEXT & operator=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceHostImageCopyPropertiesEXT & operator=( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ copySrcLayoutCount = copySrcLayoutCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT &
+ setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCopySrcLayouts = pCopySrcLayouts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PhysicalDeviceHostImageCopyPropertiesEXT &
+ setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ copySrcLayoutCount = static_cast<uint32_t>( copySrcLayouts_.size() );
+ pCopySrcLayouts = copySrcLayouts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ copyDstLayoutCount = copyDstLayoutCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT &
+ setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCopyDstLayouts = pCopyDstLayouts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PhysicalDeviceHostImageCopyPropertiesEXT &
+ setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ copyDstLayoutCount = static_cast<uint32_t>( copyDstLayouts_.size() );
+ pCopyDstLayouts = copyDstLayouts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT &
+ setOptimalTilingLayoutUUID( std::array<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ optimalTilingLayoutUUID = optimalTilingLayoutUUID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT &
+ setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT
+ {
+ identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceHostImageCopyPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceHostImageCopyPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceHostImageCopyPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ImageLayout * const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceHostImageCopyPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) &&
+ ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) &&
+ ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT;
+ void * pNext = {};
+ uint32_t copySrcLayoutCount = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {};
+ uint32_t copyDstLayoutCount = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID = {};
+ VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT>
+ {
+ using Type = PhysicalDeviceHostImageCopyPropertiesEXT;
+ };
+
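Another hedged sketch: copySrcLayoutCount / pCopySrcLayouts (and the Dst pair) follow the usual Vulkan two-call array pattern, so a caller queries the counts first, allocates storage, points the struct at it, and queries again. The helper name and flow below are illustrative, not taken from this diff.

#include <vector>
#include <vulkan/vulkan.hpp>

// Minimal sketch: enumerate the image layouts usable as host-copy source and destination.
void queryHostImageCopyLayouts( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceHostImageCopyPropertiesEXT hostCopyProps;  // first call: retrieves the counts only
  vk::PhysicalDeviceProperties2                props2;
  props2.pNext = &hostCopyProps;
  physicalDevice.getProperties2( &props2 );

  std::vector<vk::ImageLayout> srcLayouts( hostCopyProps.copySrcLayoutCount );
  std::vector<vk::ImageLayout> dstLayouts( hostCopyProps.copyDstLayoutCount );
  hostCopyProps.pCopySrcLayouts = srcLayouts.data();           // second call: fills the arrays
  hostCopyProps.pCopyDstLayouts = dstLayouts.data();
  physicalDevice.getProperties2( &props2 );
}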
struct PhysicalDeviceHostQueryResetFeatures
{
using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
@@ -83667,6 +85327,117 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
};
+ struct PipelineIndirectDeviceAddressInfoNV
+ {
+ using NativeType = VkPipelineIndirectDeviceAddressInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , pipeline( pipeline_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast<PipelineIndirectDeviceAddressInfoNV const *>( &rhs ) )
+ {
+ }
+
+ PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV &
+ setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipeline = pipeline_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( this );
+ }
+
+ operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint const &,
+ VULKAN_HPP_NAMESPACE::Pipeline const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelineBindPoint, pipeline );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default;
+#else
+ bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline );
+# endif
+ }
+
+ bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineIndirectDeviceAddressInfoNV>
+ {
+ using Type = PipelineIndirectDeviceAddressInfoNV;
+ };
+
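A sketch of how the struct above is typically consumed: VK_NV_device_generated_commands_compute pairs it with vkGetPipelineIndirectDeviceAddressNV, which returns the device address of a compute pipeline's indirect metadata. The entry point is fetched via getProcAddr here because extension commands are not statically exported; the helper name is illustrative, not content of this commit.

#include <vulkan/vulkan.hpp>

// Minimal sketch: query the indirect device address of a compute pipeline.
VkDeviceAddress getComputePipelineIndirectAddress( vk::Device device, vk::Pipeline pipeline )
{
  auto pfnGetPipelineIndirectDeviceAddressNV =
    reinterpret_cast<PFN_vkGetPipelineIndirectDeviceAddressNV>( device.getProcAddr( "vkGetPipelineIndirectDeviceAddressNV" ) );

  vk::PipelineIndirectDeviceAddressInfoNV info( vk::PipelineBindPoint::eCompute, pipeline );
  return pfnGetPipelineIndirectDeviceAddressNV( static_cast<VkDevice>( device ),
                                                reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
}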
struct PipelineInfoKHR
{
using NativeType = VkPipelineInfoKHR;
@@ -99024,6 +100795,88 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SubpassShadingPipelineCreateInfoHUAWEI;
};
+ struct SubresourceHostMemcpySizeEXT
+ {
+ using NativeType = VkSubresourceHostMemcpySizeEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySizeEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , size( size_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubresourceHostMemcpySizeEXT( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubresourceHostMemcpySizeEXT( *reinterpret_cast<SubresourceHostMemcpySizeEXT const *>( &rhs ) )
+ {
+ }
+
+ SubresourceHostMemcpySizeEXT & operator=( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SubresourceHostMemcpySizeEXT & operator=( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkSubresourceHostMemcpySizeEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubresourceHostMemcpySizeEXT *>( this );
+ }
+
+ operator VkSubresourceHostMemcpySizeEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubresourceHostMemcpySizeEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, size );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SubresourceHostMemcpySizeEXT const & ) const = default;
+#else
+ bool operator==( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size );
+# endif
+ }
+
+ bool operator!=( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySizeEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSubresourceHostMemcpySizeEXT>
+ {
+ using Type = SubresourceHostMemcpySizeEXT;
+ };
+
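Sketch only: SubresourceHostMemcpySizeEXT is an output structure chained into SubresourceLayout2EXT; when host copies use HostImageCopyFlagBitsEXT::eMemcpy, the returned size tells the caller how much host memory one subresource occupies. The query flow below (via a dynamically loaded vkGetImageSubresourceLayout2EXT) and the helper name are assumptions based on the extension, not content of this diff.

#include <vulkan/vulkan.hpp>

// Minimal sketch: ask how many bytes a memcpy-style host copy of mip 0 / layer 0 would need.
vk::DeviceSize queryHostMemcpySize( vk::Device device, vk::Image image )
{
  auto pfnGetImageSubresourceLayout2EXT =
    reinterpret_cast<PFN_vkGetImageSubresourceLayout2EXT>( device.getProcAddr( "vkGetImageSubresourceLayout2EXT" ) );

  vk::ImageSubresource2EXT         subresource( vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
  vk::SubresourceHostMemcpySizeEXT memcpySize;
  vk::SubresourceLayout2EXT        layout;
  layout.pNext = &memcpySize;   // chain the output struct defined above

  pfnGetImageSubresourceLayout2EXT( static_cast<VkDevice>( device ),
                                    static_cast<VkImage>( image ),
                                    reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
                                    reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
  return memcpySize.size;
}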
struct SubresourceLayout2EXT
{
using NativeType = VkSubresourceLayout2EXT;
diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp
index a0a1431..7e3a8f5 100644
--- a/include/vulkan/vulkan_to_string.hpp
+++ b/include/vulkan/vulkan_to_string.hpp
@@ -178,6 +178,8 @@ namespace VULKAN_HPP_NAMESPACE
result += "FragmentDensityMapEXT | ";
if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
result += "FragmentShadingRateAttachmentKHR | ";
+ if ( value & ImageUsageFlagBits::eHostTransferEXT )
+ result += "HostTransferEXT | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR )
result += "VideoEncodeDstKHR | ";
@@ -952,6 +954,8 @@ namespace VULKAN_HPP_NAMESPACE
result += "DescriptorBufferEXT | ";
if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT )
result += "EmbeddedImmutableSamplersEXT | ";
+ if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV )
+ result += "IndirectBindableNV | ";
if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT )
result += "HostOnlyPoolEXT | ";
@@ -1801,6 +1805,8 @@ namespace VULKAN_HPP_NAMESPACE
result += "FragmentDensityMapEXT | ";
if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR )
result += "FragmentShadingRateAttachmentKHR | ";
+ if ( value & FormatFeatureFlagBits2::eHostImageTransferEXT )
+ result += "HostImageTransferEXT | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR )
result += "VideoEncodeInputKHR | ";
@@ -2775,6 +2781,20 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
+ //=== VK_EXT_host_image_copy ===
+
+ VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & HostImageCopyFlagBitsEXT::eMemcpy )
+ result += "Memcpy | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
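For reference, a small hedged example of what the new formatters return (expected strings inferred from the code above; the standalone program is illustrative only):

#include <iostream>
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_to_string.hpp>

int main()
{
  std::cout << vk::to_string( vk::HostImageCopyFlagBitsEXT::eMemcpy ) << '\n';                               // Memcpy
  std::cout << vk::to_string( vk::HostImageCopyFlagsEXT( vk::HostImageCopyFlagBitsEXT::eMemcpy ) ) << '\n';  // { Memcpy }
  std::cout << vk::to_string( vk::HostImageCopyFlagsEXT{} ) << '\n';                                         // {}
  return 0;
}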
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR )
@@ -3970,6 +3990,16 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR";
+ case StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT: return "PhysicalDeviceHostImageCopyFeaturesEXT";
+ case StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT: return "PhysicalDeviceHostImageCopyPropertiesEXT";
+ case StructureType::eMemoryToImageCopyEXT: return "MemoryToImageCopyEXT";
+ case StructureType::eImageToMemoryCopyEXT: return "ImageToMemoryCopyEXT";
+ case StructureType::eCopyImageToMemoryInfoEXT: return "CopyImageToMemoryInfoEXT";
+ case StructureType::eCopyMemoryToImageInfoEXT: return "CopyMemoryToImageInfoEXT";
+ case StructureType::eHostImageLayoutTransitionInfoEXT: return "HostImageLayoutTransitionInfoEXT";
+ case StructureType::eCopyImageToImageInfoEXT: return "CopyImageToImageInfoEXT";
+ case StructureType::eSubresourceHostMemcpySizeEXT: return "SubresourceHostMemcpySizeEXT";
+ case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT";
case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR";
case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR";
case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
@@ -4178,6 +4208,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV";
case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV";
case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV";
+ case StructureType::eComputePipelineIndirectBufferInfoNV: return "ComputePipelineIndirectBufferInfoNV";
+ case StructureType::ePipelineIndirectDeviceAddressInfoNV: return "PipelineIndirectDeviceAddressInfoNV";
case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT";
case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM";
@@ -4687,6 +4720,7 @@ namespace VULKAN_HPP_NAMESPACE
case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+ case ImageUsageFlagBits::eHostTransferEXT: return "HostTransferEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
@@ -5618,6 +5652,7 @@ namespace VULKAN_HPP_NAMESPACE
case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT";
case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT";
+ case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV";
case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
@@ -6398,6 +6433,7 @@ namespace VULKAN_HPP_NAMESPACE
case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+ case FormatFeatureFlagBits2::eHostImageTransferEXT: return "HostImageTransferEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
@@ -7780,6 +7816,17 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ //=== VK_EXT_host_image_copy ===
+
+ VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case HostImageCopyFlagBitsEXT::eMemcpy: return "Memcpy";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR )
@@ -7835,6 +7882,8 @@ namespace VULKAN_HPP_NAMESPACE
case IndirectCommandsTokenTypeNV::eDraw: return "Draw";
case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks";
case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks";
+ case IndirectCommandsTokenTypeNV::ePipeline: return "Pipeline";
+ case IndirectCommandsTokenTypeNV::eDispatch: return "Dispatch";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
diff --git a/include/vulkan/vulkan_video.hpp b/include/vulkan/vulkan_video.hpp
new file mode 100644
index 0000000..7465938
--- /dev/null
+++ b/include/vulkan/vulkan_video.hpp
@@ -0,0 +1,2696 @@
+// Copyright 2021-2023 The Khronos Group Inc.
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#include <vk_video/vulkan_video_codec_h264std.h>
+#include <vk_video/vulkan_video_codec_h264std_decode.h>
+#include <vk_video/vulkan_video_codec_h264std_encode.h>
+#include <vk_video/vulkan_video_codec_h265std.h>
+#include <vk_video/vulkan_video_codec_h265std_decode.h>
+#include <vk_video/vulkan_video_codec_h265std_encode.h>
+#include <vk_video/vulkan_video_codecs_common.h>
+#include <vulkan/vulkan.hpp>
+
+#if !defined( VULKAN_HPP_VIDEO_NAMESPACE )
+# define VULKAN_HPP_VIDEO_NAMESPACE video
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+ namespace VULKAN_HPP_VIDEO_NAMESPACE
+ {
+
+ //=============
+ //=== ENUMs ===
+ //=============
+
+ //=== vulkan_video_codec_h264std ===
+
+ enum class H264ChromaFormatIdc
+ {
+ eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME,
+ e420 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420,
+ e422 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422,
+ e444 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444,
+ eInvalid = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID
+ };
+
+ enum class H264ProfileIdc
+ {
+ eBaseline = STD_VIDEO_H264_PROFILE_IDC_BASELINE,
+ eMain = STD_VIDEO_H264_PROFILE_IDC_MAIN,
+ eHigh = STD_VIDEO_H264_PROFILE_IDC_HIGH,
+ eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE,
+ eInvalid = STD_VIDEO_H264_PROFILE_IDC_INVALID
+ };
+
+ enum class H264LevelIdc
+ {
+ e1_0 = STD_VIDEO_H264_LEVEL_IDC_1_0,
+ e1_1 = STD_VIDEO_H264_LEVEL_IDC_1_1,
+ e1_2 = STD_VIDEO_H264_LEVEL_IDC_1_2,
+ e1_3 = STD_VIDEO_H264_LEVEL_IDC_1_3,
+ e2_0 = STD_VIDEO_H264_LEVEL_IDC_2_0,
+ e2_1 = STD_VIDEO_H264_LEVEL_IDC_2_1,
+ e2_2 = STD_VIDEO_H264_LEVEL_IDC_2_2,
+ e3_0 = STD_VIDEO_H264_LEVEL_IDC_3_0,
+ e3_1 = STD_VIDEO_H264_LEVEL_IDC_3_1,
+ e3_2 = STD_VIDEO_H264_LEVEL_IDC_3_2,
+ e4_0 = STD_VIDEO_H264_LEVEL_IDC_4_0,
+ e4_1 = STD_VIDEO_H264_LEVEL_IDC_4_1,
+ e4_2 = STD_VIDEO_H264_LEVEL_IDC_4_2,
+ e5_0 = STD_VIDEO_H264_LEVEL_IDC_5_0,
+ e5_1 = STD_VIDEO_H264_LEVEL_IDC_5_1,
+ e5_2 = STD_VIDEO_H264_LEVEL_IDC_5_2,
+ e6_0 = STD_VIDEO_H264_LEVEL_IDC_6_0,
+ e6_1 = STD_VIDEO_H264_LEVEL_IDC_6_1,
+ e6_2 = STD_VIDEO_H264_LEVEL_IDC_6_2,
+ eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID
+ };
+
+ enum class H264PocType
+ {
+ e0 = STD_VIDEO_H264_POC_TYPE_0,
+ e1 = STD_VIDEO_H264_POC_TYPE_1,
+ e2 = STD_VIDEO_H264_POC_TYPE_2,
+ eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID
+ };
+
+ enum class H264AspectRatioIdc
+ {
+ eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED,
+ eSquare = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE,
+ e12_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11,
+ e10_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11,
+ e16_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11,
+ e40_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33,
+ e24_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11,
+ e20_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11,
+ e32_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11,
+ e80_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33,
+ e18_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11,
+ e15_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11,
+ e64_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33,
+ e160_99 = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99,
+ e4_3 = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3,
+ e3_2 = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2,
+ e2_1 = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1,
+ eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR,
+ eInvalid = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID
+ };
+
+ enum class H264WeightedBipredIdc
+ {
+ eDefault = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT,
+ eExplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT,
+ eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT,
+ eInvalid = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID
+ };
+
+ enum class H264ModificationOfPicNumsIdc
+ {
+ eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT,
+ eShortTermAdd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD,
+ eLongTerm = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM,
+ eEnd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END,
+ eInvalid = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID
+ };
+
+ enum class H264MemMgmtControlOp
+ {
+ eEnd = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END,
+ eUnmarkShortTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM,
+ eUnmarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM,
+ eMarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM,
+ eSetMaxLongTermIndex = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX,
+ eUnmarkAll = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL,
+ eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM,
+ eInvalid = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID
+ };
+
+ enum class H264CabacInitIdc
+ {
+ e0 = STD_VIDEO_H264_CABAC_INIT_IDC_0,
+ e1 = STD_VIDEO_H264_CABAC_INIT_IDC_1,
+ e2 = STD_VIDEO_H264_CABAC_INIT_IDC_2,
+ eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID
+ };
+
+ enum class H264DisableDeblockingFilterIdc
+ {
+ eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED,
+ eEnabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED,
+ ePartial = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL,
+ eInvalid = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID
+ };
+
+ enum class H264SliceType
+ {
+ eP = STD_VIDEO_H264_SLICE_TYPE_P,
+ eB = STD_VIDEO_H264_SLICE_TYPE_B,
+ eI = STD_VIDEO_H264_SLICE_TYPE_I,
+ eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID
+ };
+
+ enum class H264PictureType
+ {
+ eP = STD_VIDEO_H264_PICTURE_TYPE_P,
+ eB = STD_VIDEO_H264_PICTURE_TYPE_B,
+ eI = STD_VIDEO_H264_PICTURE_TYPE_I,
+ eIdr = STD_VIDEO_H264_PICTURE_TYPE_IDR,
+ eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID
+ };
+
+ enum class H264NonVclNaluType
+ {
+ eSps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS,
+ ePps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS,
+ eAud = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD,
+ ePrefix = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX,
+ eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE,
+ eEndOfStream = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM,
+ ePrecoded = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED,
+ eInvalid = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID
+ };
+
+ //=== vulkan_video_codec_h264std_decode ===
+
+ enum class DecodeH264FieldOrderCount
+ {
+ eTop = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP,
+ eBottom = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM,
+ eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID
+ };
+
+ //=== vulkan_video_codec_h265std ===
+
+ enum class H265ChromaFormatIdc
+ {
+ eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME,
+ e420 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420,
+ e422 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422,
+ e444 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444,
+ eInvalid = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID
+ };
+
+ enum class H265ProfileIdc
+ {
+ eMain = STD_VIDEO_H265_PROFILE_IDC_MAIN,
+ eMain10 = STD_VIDEO_H265_PROFILE_IDC_MAIN_10,
+ eMainStillPicture = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE,
+ eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS,
+ eSccExtensions = STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS,
+ eInvalid = STD_VIDEO_H265_PROFILE_IDC_INVALID
+ };
+
+ enum class H265LevelIdc
+ {
+ e1_0 = STD_VIDEO_H265_LEVEL_IDC_1_0,
+ e2_0 = STD_VIDEO_H265_LEVEL_IDC_2_0,
+ e2_1 = STD_VIDEO_H265_LEVEL_IDC_2_1,
+ e3_0 = STD_VIDEO_H265_LEVEL_IDC_3_0,
+ e3_1 = STD_VIDEO_H265_LEVEL_IDC_3_1,
+ e4_0 = STD_VIDEO_H265_LEVEL_IDC_4_0,
+ e4_1 = STD_VIDEO_H265_LEVEL_IDC_4_1,
+ e5_0 = STD_VIDEO_H265_LEVEL_IDC_5_0,
+ e5_1 = STD_VIDEO_H265_LEVEL_IDC_5_1,
+ e5_2 = STD_VIDEO_H265_LEVEL_IDC_5_2,
+ e6_0 = STD_VIDEO_H265_LEVEL_IDC_6_0,
+ e6_1 = STD_VIDEO_H265_LEVEL_IDC_6_1,
+ e6_2 = STD_VIDEO_H265_LEVEL_IDC_6_2,
+ eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID
+ };
+
+ enum class H265SliceType
+ {
+ eB = STD_VIDEO_H265_SLICE_TYPE_B,
+ eP = STD_VIDEO_H265_SLICE_TYPE_P,
+ eI = STD_VIDEO_H265_SLICE_TYPE_I,
+ eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID
+ };
+
+ enum class H265PictureType
+ {
+ eP = STD_VIDEO_H265_PICTURE_TYPE_P,
+ eB = STD_VIDEO_H265_PICTURE_TYPE_B,
+ eI = STD_VIDEO_H265_PICTURE_TYPE_I,
+ eIdr = STD_VIDEO_H265_PICTURE_TYPE_IDR,
+ eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID
+ };
+
+ enum class H265AspectRatioIdc
+ {
+ eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED,
+ eSquare = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE,
+ e12_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11,
+ e10_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11,
+ e16_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11,
+ e40_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33,
+ e24_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11,
+ e20_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11,
+ e32_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11,
+ e80_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33,
+ e18_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11,
+ e15_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11,
+ e64_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33,
+ e160_99 = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99,
+ e4_3 = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3,
+ e3_2 = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2,
+ e2_1 = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1,
+ eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR,
+ eInvalid = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID
+ };
+
+ //===============
+ //=== STRUCTS ===
+ //===============
+
+ //=== vulkan_video_codec_h264std ===
+
+ struct H264SpsVuiFlags
+ {
+ using NativeType = StdVideoH264SpsVuiFlags;
+
+ operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264SpsVuiFlags *>( this );
+ }
+
+ operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264SpsVuiFlags *>( this );
+ }
+
+ bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) &&
+ ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) &&
+ ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) &&
+ ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) &&
+ ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) &&
+ ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+ ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag );
+ }
+
+ bool operator!=( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t aspect_ratio_info_present_flag : 1;
+ uint32_t overscan_info_present_flag : 1;
+ uint32_t overscan_appropriate_flag : 1;
+ uint32_t video_signal_type_present_flag : 1;
+ uint32_t video_full_range_flag : 1;
+ uint32_t color_description_present_flag : 1;
+ uint32_t chroma_loc_info_present_flag : 1;
+ uint32_t timing_info_present_flag : 1;
+ uint32_t fixed_frame_rate_flag : 1;
+ uint32_t bitstream_restriction_flag : 1;
+ uint32_t nal_hrd_parameters_present_flag : 1;
+ uint32_t vcl_hrd_parameters_present_flag : 1;
+ };
+
+ struct H264HrdParameters
+ {
+ using NativeType = StdVideoH264HrdParameters;
+
+ operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264HrdParameters *>( this );
+ }
+
+ operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264HrdParameters *>( this );
+ }
+
+ bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) &&
+ ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) &&
+ ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) &&
+ ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+ ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) &&
+ ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length );
+ }
+
+ bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint8_t cpb_cnt_minus1 = {};
+ uint8_t bit_rate_scale = {};
+ uint8_t cpb_size_scale = {};
+ uint8_t reserved1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> bit_rate_value_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> cpb_size_value_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> cbr_flag = {};
+ uint32_t initial_cpb_removal_delay_length_minus1 = {};
+ uint32_t cpb_removal_delay_length_minus1 = {};
+ uint32_t dpb_output_delay_length_minus1 = {};
+ uint32_t time_offset_length = {};
+ };
+
+ struct H264SequenceParameterSetVui
+ {
+ using NativeType = StdVideoH264SequenceParameterSetVui;
+
+ operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264SequenceParameterSetVui *>( this );
+ }
+
+ operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264SequenceParameterSetVui *>( this );
+ }
+
+ bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) &&
+ ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) &&
+ ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) &&
+ ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) &&
+ ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) &&
+ ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) &&
+ ( pHrdParameters == rhs.pHrdParameters );
+ }
+
+ bool operator!=( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified;
+ uint16_t sar_width = {};
+ uint16_t sar_height = {};
+ uint8_t video_format = {};
+ uint8_t colour_primaries = {};
+ uint8_t transfer_characteristics = {};
+ uint8_t matrix_coefficients = {};
+ uint32_t num_units_in_tick = {};
+ uint32_t time_scale = {};
+ uint8_t max_num_reorder_frames = {};
+ uint8_t max_dec_frame_buffering = {};
+ uint8_t chroma_sample_loc_type_top_field = {};
+ uint8_t chroma_sample_loc_type_bottom_field = {};
+ uint32_t reserved1 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters = {};
+ };
+
+ struct H264SpsFlags
+ {
+ using NativeType = StdVideoH264SpsFlags;
+
+ operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264SpsFlags *>( this );
+ }
+
+ operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264SpsFlags *>( this );
+ }
+
+ bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) &&
+ ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) &&
+ ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) &&
+ ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) &&
+ ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) &&
+ ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) &&
+ ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) &&
+ ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) &&
+ ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag );
+ }
+
+ bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t constraint_set0_flag : 1;
+ uint32_t constraint_set1_flag : 1;
+ uint32_t constraint_set2_flag : 1;
+ uint32_t constraint_set3_flag : 1;
+ uint32_t constraint_set4_flag : 1;
+ uint32_t constraint_set5_flag : 1;
+ uint32_t direct_8x8_inference_flag : 1;
+ uint32_t mb_adaptive_frame_field_flag : 1;
+ uint32_t frame_mbs_only_flag : 1;
+ uint32_t delta_pic_order_always_zero_flag : 1;
+ uint32_t separate_colour_plane_flag : 1;
+ uint32_t gaps_in_frame_num_value_allowed_flag : 1;
+ uint32_t qpprime_y_zero_transform_bypass_flag : 1;
+ uint32_t frame_cropping_flag : 1;
+ uint32_t seq_scaling_matrix_present_flag : 1;
+ uint32_t vui_parameters_present_flag : 1;
+ };
+
+ struct H264ScalingLists
+ {
+ using NativeType = StdVideoH264ScalingLists;
+
+ operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264ScalingLists *>( this );
+ }
+
+ operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264ScalingLists *>( this );
+ }
+
+ bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) &&
+ ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 );
+ }
+
+ bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint16_t scaling_list_present_mask = {};
+ uint16_t use_default_scaling_matrix_mask = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS>
+ ScalingList4x4 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS>
+ ScalingList8x8 = {};
+ };
+
+ struct H264SequenceParameterSet
+ {
+ using NativeType = StdVideoH264SequenceParameterSet;
+
+ operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264SequenceParameterSet *>( this );
+ }
+
+ operator StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264SequenceParameterSet *>( this );
+ }
+
+ bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) &&
+ ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) &&
+ ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) &&
+ ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) &&
+ ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) &&
+ ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) &&
+ ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) &&
+ ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) &&
+ ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) &&
+ ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) &&
+ ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) &&
+ ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) &&
+ ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui );
+ }
+
+ bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc chroma_format_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome;
+ uint8_t seq_parameter_set_id = {};
+ uint8_t bit_depth_luma_minus8 = {};
+ uint8_t bit_depth_chroma_minus8 = {};
+ uint8_t log2_max_frame_num_minus4 = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0;
+ int32_t offset_for_non_ref_pic = {};
+ int32_t offset_for_top_to_bottom_field = {};
+ uint8_t log2_max_pic_order_cnt_lsb_minus4 = {};
+ uint8_t num_ref_frames_in_pic_order_cnt_cycle = {};
+ uint8_t max_num_ref_frames = {};
+ uint8_t reserved1 = {};
+ uint32_t pic_width_in_mbs_minus1 = {};
+ uint32_t pic_height_in_map_units_minus1 = {};
+ uint32_t frame_crop_left_offset = {};
+ uint32_t frame_crop_right_offset = {};
+ uint32_t frame_crop_top_offset = {};
+ uint32_t frame_crop_bottom_offset = {};
+ uint32_t reserved2 = {};
+ const int32_t * pOffsetForRefFrame = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {};
+ };
+
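An illustrative sketch (assuming the default vk / video namespace macros): every wrapper in this new header carries a NativeType alias plus reference conversion operators, so a filled vk::video struct can be viewed directly as its StdVideo counterpart without copying. Field values and the helper name below are placeholders.

#include <vulkan/vulkan_video.hpp>

// Minimal sketch: fill the C++ wrapper, then view it as the C Std type via the
// conversion operator defined above; the two have identical layout.
void example()
{
  vk::video::H264SequenceParameterSet sps;
  sps.profile_idc       = vk::video::H264ProfileIdc::eMain;
  sps.level_idc         = vk::video::H264LevelIdc::e4_1;
  sps.chroma_format_idc = vk::video::H264ChromaFormatIdc::e420;

  StdVideoH264SequenceParameterSet const & cSps = sps;  // no copy, just a reinterpreted view
  (void)cSps;  // would typically be referenced from, e.g., a session-parameters add-info struct
}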
+ struct H264PpsFlags
+ {
+ using NativeType = StdVideoH264PpsFlags;
+
+ operator StdVideoH264PpsFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264PpsFlags *>( this );
+ }
+
+ operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264PpsFlags *>( this );
+ }
+
+ bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) &&
+ ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) &&
+ ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) &&
+ ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) &&
+ ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag );
+ }
+
+ bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t transform_8x8_mode_flag : 1;
+ uint32_t redundant_pic_cnt_present_flag : 1;
+ uint32_t constrained_intra_pred_flag : 1;
+ uint32_t deblocking_filter_control_present_flag : 1;
+ uint32_t weighted_pred_flag : 1;
+ uint32_t bottom_field_pic_order_in_frame_present_flag : 1;
+ uint32_t entropy_coding_mode_flag : 1;
+ uint32_t pic_scaling_matrix_present_flag : 1;
+ };
+
+ struct H264PictureParameterSet
+ {
+ using NativeType = StdVideoH264PictureParameterSet;
+
+ operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH264PictureParameterSet *>( this );
+ }
+
+ operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH264PictureParameterSet *>( this );
+ }
+
+ bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+ ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) &&
+ ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) &&
+ ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) &&
+ ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) &&
+ ( pScalingLists == rhs.pScalingLists );
+ }
+
+ bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags flags = {};
+ uint8_t seq_parameter_set_id = {};
+ uint8_t pic_parameter_set_id = {};
+ uint8_t num_ref_idx_l0_default_active_minus1 = {};
+ uint8_t num_ref_idx_l1_default_active_minus1 = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault;
+ int8_t pic_init_qp_minus26 = {};
+ int8_t pic_init_qs_minus26 = {};
+ int8_t chroma_qp_index_offset = {};
+ int8_t second_chroma_qp_index_offset = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {};
+ };
+
+ //=== vulkan_video_codec_h264std_decode ===
+
+ struct DecodeH264PictureInfoFlags
+ {
+ using NativeType = StdVideoDecodeH264PictureInfoFlags;
+
+ operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH264PictureInfoFlags *>( this );
+ }
+
+ operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH264PictureInfoFlags *>( this );
+ }
+
+ bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) &&
+ ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) &&
+ ( complementary_field_pair == rhs.complementary_field_pair );
+ }
+
+ bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t field_pic_flag : 1;
+ uint32_t is_intra : 1;
+ uint32_t IdrPicFlag : 1;
+ uint32_t bottom_field_flag : 1;
+ uint32_t is_reference : 1;
+ uint32_t complementary_field_pair : 1;
+ };
+
+ struct DecodeH264PictureInfo
+ {
+ using NativeType = StdVideoDecodeH264PictureInfo;
+
+ operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH264PictureInfo *>( this );
+ }
+
+ operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH264PictureInfo *>( this );
+ }
+
+ bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+ ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) &&
+ ( PicOrderCnt == rhs.PicOrderCnt );
+ }
+
+ bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags flags = {};
+ uint8_t seq_parameter_set_id = {};
+ uint8_t pic_parameter_set_id = {};
+ uint8_t reserved1 = {};
+ uint8_t reserved2 = {};
+ uint16_t frame_num = {};
+ uint16_t idr_pic_id = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt = {};
+ };
+
+ struct DecodeH264ReferenceInfoFlags
+ {
+ using NativeType = StdVideoDecodeH264ReferenceInfoFlags;
+
+ operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfoFlags *>( this );
+ }
+
+ operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH264ReferenceInfoFlags *>( this );
+ }
+
+ bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) &&
+ ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing );
+ }
+
+ bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t top_field_flag : 1;
+ uint32_t bottom_field_flag : 1;
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t is_non_existing : 1;
+ };
+
+ struct DecodeH264ReferenceInfo
+ {
+ using NativeType = StdVideoDecodeH264ReferenceInfo;
+
+ operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfo *>( this );
+ }
+
+ operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH264ReferenceInfo *>( this );
+ }
+
+ bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt );
+ }
+
+ bool operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags flags = {};
+ uint16_t FrameNum = {};
+ uint16_t reserved = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt = {};
+ };
+
+ //=== vulkan_video_codec_h264std_encode ===
+
+ struct EncodeH264WeightTableFlags
+ {
+ using NativeType = StdVideoEncodeH264WeightTableFlags;
+
+ operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264WeightTableFlags *>( this );
+ }
+
+ operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264WeightTableFlags *>( this );
+ }
+
+ bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) &&
+ ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag );
+ }
+
+ bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t luma_weight_l0_flag = {};
+ uint32_t chroma_weight_l0_flag = {};
+ uint32_t luma_weight_l1_flag = {};
+ uint32_t chroma_weight_l1_flag = {};
+ };
+
+ struct EncodeH264WeightTable
+ {
+ using NativeType = StdVideoEncodeH264WeightTable;
+
+ operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264WeightTable *>( this );
+ }
+
+ operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264WeightTable *>( this );
+ }
+
+ bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) &&
+ ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) &&
+ ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) &&
+ ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) &&
+ ( chroma_offset_l1 == rhs.chroma_offset_l1 );
+ }
+
+ bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags flags = {};
+ uint8_t luma_log2_weight_denom = {};
+ uint8_t chroma_log2_weight_denom = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_weight_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_offset_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_weight_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_offset_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l1 = {};
+ };
+
+ struct EncodeH264SliceHeaderFlags
+ {
+ using NativeType = StdVideoEncodeH264SliceHeaderFlags;
+
+ operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264SliceHeaderFlags *>( this );
+ }
+
+ operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264SliceHeaderFlags *>( this );
+ }
+
+ bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) &&
+ ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t direct_spatial_mv_pred_flag : 1;
+ uint32_t num_ref_idx_active_override_flag : 1;
+ uint32_t reserved : 30;
+ };
+
+ struct EncodeH264PictureInfoFlags
+ {
+ using NativeType = StdVideoEncodeH264PictureInfoFlags;
+
+ operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264PictureInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264PictureInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) &&
+ ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) &&
+ ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t IdrPicFlag : 1;
+ uint32_t is_reference : 1;
+ uint32_t no_output_of_prior_pics_flag : 1;
+ uint32_t long_term_reference_flag : 1;
+ uint32_t adaptive_ref_pic_marking_mode_flag : 1;
+ uint32_t reserved : 27;
+ };
+
+ struct EncodeH264ReferenceInfoFlags
+ {
+ using NativeType = StdVideoEncodeH264ReferenceInfoFlags;
+
+ operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264ReferenceInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264ReferenceInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t reserved : 31;
+ };
+
+ struct EncodeH264ReferenceListsInfoFlags
+ {
+ using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags;
+
+ operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH264ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) &&
+ ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t ref_pic_list_modification_flag_l0 : 1;
+ uint32_t ref_pic_list_modification_flag_l1 : 1;
+ uint32_t reserved : 30;
+ };
+
+ struct EncodeH264RefListModEntry
+ {
+ using NativeType = StdVideoEncodeH264RefListModEntry;
+
+ operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264RefListModEntry *>( this );
+ }
+
+ operator StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264RefListModEntry *>( this );
+ }
+
+ bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) &&
+ ( long_term_pic_num == rhs.long_term_pic_num );
+ }
+
+ bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract;
+ uint16_t abs_diff_pic_num_minus1 = {};
+ uint16_t long_term_pic_num = {};
+ };
+
+ struct EncodeH264RefPicMarkingEntry
+ {
+ using NativeType = StdVideoEncodeH264RefPicMarkingEntry;
+
+ operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264RefPicMarkingEntry *>( this );
+ }
+
+ operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264RefPicMarkingEntry *>( this );
+ }
+
+ bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( operation == rhs.operation ) && ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) &&
+ ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) &&
+ ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 );
+ }
+
+ bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp operation =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd;
+ uint16_t difference_of_pic_nums_minus1 = {};
+ uint16_t long_term_pic_num = {};
+ uint16_t long_term_frame_idx = {};
+ uint16_t max_long_term_frame_idx_plus1 = {};
+ };
+
+ struct EncodeH264ReferenceListsInfo
+ {
+ using NativeType = StdVideoEncodeH264ReferenceListsInfo;
+
+ operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfo *>( this );
+ }
+
+ operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfo *>( this );
+ }
+
+ bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) &&
+ ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) &&
+ ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) &&
+ ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) &&
+ ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) &&
+ ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations );
+ }
+
+ bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags flags = {};
+ uint8_t num_ref_idx_l0_active_minus1 = {};
+ uint8_t num_ref_idx_l1_active_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> RefPicList0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> RefPicList1 = {};
+ uint8_t refList0ModOpCount = {};
+ uint8_t refList1ModOpCount = {};
+ uint8_t refPicMarkingOpCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 7> reserved1 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList0ModOperations = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList1ModOperations = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations = {};
+ };
+
+ struct EncodeH264PictureInfo
+ {
+ using NativeType = StdVideoEncodeH264PictureInfo;
+
+ operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264PictureInfo *>( this );
+ }
+
+ operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264PictureInfo *>( this );
+ }
+
+ bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+ ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) &&
+ ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists );
+ }
+
+ bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags = {};
+ uint8_t seq_parameter_set_id = {};
+ uint8_t pic_parameter_set_id = {};
+ uint16_t idr_pic_id = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP;
+ uint32_t frame_num = {};
+ int32_t PicOrderCnt = {};
+ uint8_t temporal_id = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 3> reserved1 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists = {};
+ };
+
+ struct EncodeH264ReferenceInfo
+ {
+ using NativeType = StdVideoEncodeH264ReferenceInfo;
+
+ operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264ReferenceInfo *>( this );
+ }
+
+ operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264ReferenceInfo *>( this );
+ }
+
+ bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) &&
+ ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id );
+ }
+
+ bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP;
+ uint32_t FrameNum = {};
+ int32_t PicOrderCnt = {};
+ uint16_t long_term_pic_num = {};
+ uint16_t long_term_frame_idx = {};
+ uint8_t temporal_id = {};
+ };
+
+ struct EncodeH264SliceHeader
+ {
+ using NativeType = StdVideoEncodeH264SliceHeader;
+
+ operator StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH264SliceHeader *>( this );
+ }
+
+ operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH264SliceHeader *>( this );
+ }
+
+ bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) &&
+ ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) &&
+ ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) &&
+ ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable );
+ }
+
+ bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags = {};
+ uint32_t first_mb_in_slice = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP;
+ int8_t slice_alpha_c0_offset_div2 = {};
+ int8_t slice_beta_offset_div2 = {};
+ uint16_t reserved1 = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled;
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {};
+ };
+
+ //=== vulkan_video_codec_h265std ===
+
+ struct H265DecPicBufMgr
+ {
+ using NativeType = StdVideoH265DecPicBufMgr;
+
+ operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265DecPicBufMgr *>( this );
+ }
+
+ operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265DecPicBufMgr *>( this );
+ }
+
+ bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) &&
+ ( max_num_reorder_pics == rhs.max_num_reorder_pics );
+ }
+
+ bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> max_latency_increase_plus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> max_dec_pic_buffering_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> max_num_reorder_pics = {};
+ };
+
+ struct H265SubLayerHrdParameters
+ {
+ using NativeType = StdVideoH265SubLayerHrdParameters;
+
+ operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265SubLayerHrdParameters *>( this );
+ }
+
+ operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265SubLayerHrdParameters *>( this );
+ }
+
+ bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) &&
+ ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) &&
+ ( cbr_flag == rhs.cbr_flag );
+ }
+
+ bool operator!=( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_value_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_value_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_du_value_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_du_value_minus1 = {};
+ uint32_t cbr_flag = {};
+ };
+
+ struct H265HrdFlags
+ {
+ using NativeType = StdVideoH265HrdFlags;
+
+ operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265HrdFlags *>( this );
+ }
+
+ operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265HrdFlags *>( this );
+ }
+
+ bool operator==( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+ ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) &&
+ ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) &&
+ ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) &&
+ ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) &&
+ ( low_delay_hrd_flag == rhs.low_delay_hrd_flag );
+ }
+
+ bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t nal_hrd_parameters_present_flag : 1;
+ uint32_t vcl_hrd_parameters_present_flag : 1;
+ uint32_t sub_pic_hrd_params_present_flag : 1;
+ uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+ uint32_t fixed_pic_rate_general_flag : 8;
+ uint32_t fixed_pic_rate_within_cvs_flag : 8;
+ uint32_t low_delay_hrd_flag : 8;
+ };
+
+ struct H265HrdParameters
+ {
+ using NativeType = StdVideoH265HrdParameters;
+
+ operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265HrdParameters *>( this );
+ }
+
+ operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265HrdParameters *>( this );
+ }
+
+ bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) &&
+ ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) &&
+ ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) &&
+ ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) &&
+ ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+ ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) &&
+ ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) &&
+ ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) &&
+ ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl );
+ }
+
+ bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags flags = {};
+ uint8_t tick_divisor_minus2 = {};
+ uint8_t du_cpb_removal_delay_increment_length_minus1 = {};
+ uint8_t dpb_output_delay_du_length_minus1 = {};
+ uint8_t bit_rate_scale = {};
+ uint8_t cpb_size_scale = {};
+ uint8_t cpb_size_du_scale = {};
+ uint8_t initial_cpb_removal_delay_length_minus1 = {};
+ uint8_t au_cpb_removal_delay_length_minus1 = {};
+ uint8_t dpb_output_delay_length_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> cpb_cnt_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> elemental_duration_in_tc_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, 3> reserved = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl = {};
+ };
+
+ struct H265VpsFlags
+ {
+ using NativeType = StdVideoH265VpsFlags;
+
+ operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265VpsFlags *>( this );
+ }
+
+ operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265VpsFlags *>( this );
+ }
+
+ bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) &&
+ ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) &&
+ ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) &&
+ ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag );
+ }
+
+ bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t vps_temporal_id_nesting_flag : 1;
+ uint32_t vps_sub_layer_ordering_info_present_flag : 1;
+ uint32_t vps_timing_info_present_flag : 1;
+ uint32_t vps_poc_proportional_to_timing_flag : 1;
+ };
+
+ struct H265ProfileTierLevelFlags
+ {
+ using NativeType = StdVideoH265ProfileTierLevelFlags;
+
+ operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265ProfileTierLevelFlags *>( this );
+ }
+
+ operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265ProfileTierLevelFlags *>( this );
+ }
+
+ bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) &&
+ ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) &&
+ ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) &&
+ ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag );
+ }
+
+ bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t general_tier_flag : 1;
+ uint32_t general_progressive_source_flag : 1;
+ uint32_t general_interlaced_source_flag : 1;
+ uint32_t general_non_packed_constraint_flag : 1;
+ uint32_t general_frame_only_constraint_flag : 1;
+ };
+
+ struct H265ProfileTierLevel
+ {
+ using NativeType = StdVideoH265ProfileTierLevel;
+
+ operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265ProfileTierLevel *>( this );
+ }
+
+ operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265ProfileTierLevel *>( this );
+ }
+
+ bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == rhs.general_level_idc );
+ }
+
+ bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc general_profile_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0;
+ };
+
+ struct H265VideoParameterSet
+ {
+ using NativeType = StdVideoH265VideoParameterSet;
+
+ operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265VideoParameterSet *>( this );
+ }
+
+ operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265VideoParameterSet *>( this );
+ }
+
+ bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) &&
+ ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) &&
+ ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) &&
+ ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) &&
+ ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel );
+ }
+
+ bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags flags = {};
+ uint8_t vps_video_parameter_set_id = {};
+ uint8_t vps_max_sub_layers_minus1 = {};
+ uint8_t reserved1 = {};
+ uint8_t reserved2 = {};
+ uint32_t vps_num_units_in_tick = {};
+ uint32_t vps_time_scale = {};
+ uint32_t vps_num_ticks_poc_diff_one_minus1 = {};
+ uint32_t reserved3 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {};
+ };
+
+ struct H265ScalingLists
+ {
+ using NativeType = StdVideoH265ScalingLists;
+
+ operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265ScalingLists *>( this );
+ }
+
+ operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265ScalingLists *>( this );
+ }
+
+ bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) &&
+ ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) &&
+ ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 );
+ }
+
+ bool operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS>
+ ScalingList4x4 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS>
+ ScalingList8x8 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS>
+ ScalingList16x16 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS>
+ ScalingList32x32 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS> ScalingListDCCoef16x16 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS> ScalingListDCCoef32x32 = {};
+ };
+
+ struct H265SpsVuiFlags
+ {
+ using NativeType = StdVideoH265SpsVuiFlags;
+
+ operator StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265SpsVuiFlags *>( this );
+ }
+
+ operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265SpsVuiFlags *>( this );
+ }
+
+ bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) &&
+ ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) &&
+ ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) &&
+ ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) &&
+ ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) &&
+ ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) &&
+ ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) &&
+ ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) &&
+ ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) &&
+ ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) &&
+ ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) &&
+ ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag );
+ }
+
+ bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t aspect_ratio_info_present_flag : 1;
+ uint32_t overscan_info_present_flag : 1;
+ uint32_t overscan_appropriate_flag : 1;
+ uint32_t video_signal_type_present_flag : 1;
+ uint32_t video_full_range_flag : 1;
+ uint32_t colour_description_present_flag : 1;
+ uint32_t chroma_loc_info_present_flag : 1;
+ uint32_t neutral_chroma_indication_flag : 1;
+ uint32_t field_seq_flag : 1;
+ uint32_t frame_field_info_present_flag : 1;
+ uint32_t default_display_window_flag : 1;
+ uint32_t vui_timing_info_present_flag : 1;
+ uint32_t vui_poc_proportional_to_timing_flag : 1;
+ uint32_t vui_hrd_parameters_present_flag : 1;
+ uint32_t bitstream_restriction_flag : 1;
+ uint32_t tiles_fixed_structure_flag : 1;
+ uint32_t motion_vectors_over_pic_boundaries_flag : 1;
+ uint32_t restricted_ref_pic_lists_flag : 1;
+ };
+
+ struct H265SequenceParameterSetVui
+ {
+ using NativeType = StdVideoH265SequenceParameterSetVui;
+
+ operator StdVideoH265SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265SequenceParameterSetVui *>( this );
+ }
+
+ operator StdVideoH265SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265SequenceParameterSetVui *>( this );
+ }
+
+ bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) &&
+ ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) &&
+ ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) &&
+ ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) &&
+ ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) &&
+ ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) &&
+ ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) &&
+ ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) &&
+ ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) &&
+ ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) &&
+ ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) &&
+ ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) &&
+ ( pHrdParameters == rhs.pHrdParameters );
+ }
+
+ bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified;
+ uint16_t sar_width = {};
+ uint16_t sar_height = {};
+ uint8_t video_format = {};
+ uint8_t colour_primaries = {};
+ uint8_t transfer_characteristics = {};
+ uint8_t matrix_coeffs = {};
+ uint8_t chroma_sample_loc_type_top_field = {};
+ uint8_t chroma_sample_loc_type_bottom_field = {};
+ uint8_t reserved1 = {};
+ uint8_t reserved2 = {};
+ uint16_t def_disp_win_left_offset = {};
+ uint16_t def_disp_win_right_offset = {};
+ uint16_t def_disp_win_top_offset = {};
+ uint16_t def_disp_win_bottom_offset = {};
+ uint32_t vui_num_units_in_tick = {};
+ uint32_t vui_time_scale = {};
+ uint32_t vui_num_ticks_poc_diff_one_minus1 = {};
+ uint16_t min_spatial_segmentation_idc = {};
+ uint16_t reserved3 = {};
+ uint8_t max_bytes_per_pic_denom = {};
+ uint8_t max_bits_per_min_cu_denom = {};
+ uint8_t log2_max_mv_length_horizontal = {};
+ uint8_t log2_max_mv_length_vertical = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {};
+ };
+
+ struct H265PredictorPaletteEntries
+ {
+ using NativeType = StdVideoH265PredictorPaletteEntries;
+
+ operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265PredictorPaletteEntries *>( this );
+ }
+
+ operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265PredictorPaletteEntries *>( this );
+ }
+
+ bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries );
+ }
+
+ bool operator!=( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::
+ ArrayWrapper2D<uint16_t, STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE, STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE>
+ PredictorPaletteEntries = {};
+ };
+
+ struct H265SpsFlags
+ {
+ using NativeType = StdVideoH265SpsFlags;
+
+ operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265SpsFlags *>( this );
+ }
+
+ operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265SpsFlags *>( this );
+ }
+
+ bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) &&
+ ( conformance_window_flag == rhs.conformance_window_flag ) &&
+ ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) &&
+ ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) &&
+ ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) &&
+ ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) &&
+ ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) &&
+ ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) &&
+ ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) &&
+ ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) &&
+ ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) &&
+ ( sps_range_extension_flag == rhs.sps_range_extension_flag ) &&
+ ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) &&
+ ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) &&
+ ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) &&
+ ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) &&
+ ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) &&
+ ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) &&
+ ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) &&
+ ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) &&
+ ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) &&
+ ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) &&
+ ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag );
+ }
+
+ bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t sps_temporal_id_nesting_flag : 1;
+ uint32_t separate_colour_plane_flag : 1;
+ uint32_t conformance_window_flag : 1;
+ uint32_t sps_sub_layer_ordering_info_present_flag : 1;
+ uint32_t scaling_list_enabled_flag : 1;
+ uint32_t sps_scaling_list_data_present_flag : 1;
+ uint32_t amp_enabled_flag : 1;
+ uint32_t sample_adaptive_offset_enabled_flag : 1;
+ uint32_t pcm_enabled_flag : 1;
+ uint32_t pcm_loop_filter_disabled_flag : 1;
+ uint32_t long_term_ref_pics_present_flag : 1;
+ uint32_t sps_temporal_mvp_enabled_flag : 1;
+ uint32_t strong_intra_smoothing_enabled_flag : 1;
+ uint32_t vui_parameters_present_flag : 1;
+ uint32_t sps_extension_present_flag : 1;
+ uint32_t sps_range_extension_flag : 1;
+ uint32_t transform_skip_rotation_enabled_flag : 1;
+ uint32_t transform_skip_context_enabled_flag : 1;
+ uint32_t implicit_rdpcm_enabled_flag : 1;
+ uint32_t explicit_rdpcm_enabled_flag : 1;
+ uint32_t extended_precision_processing_flag : 1;
+ uint32_t intra_smoothing_disabled_flag : 1;
+ uint32_t high_precision_offsets_enabled_flag : 1;
+ uint32_t persistent_rice_adaptation_enabled_flag : 1;
+ uint32_t cabac_bypass_alignment_enabled_flag : 1;
+ uint32_t sps_scc_extension_flag : 1;
+ uint32_t sps_curr_pic_ref_enabled_flag : 1;
+ uint32_t palette_mode_enabled_flag : 1;
+ uint32_t sps_palette_predictor_initializers_present_flag : 1;
+ uint32_t intra_boundary_filtering_disabled_flag : 1;
+ };
+
+ struct H265ShortTermRefPicSetFlags
+ {
+ using NativeType = StdVideoH265ShortTermRefPicSetFlags;
+
+ operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265ShortTermRefPicSetFlags *>( this );
+ }
+
+ operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265ShortTermRefPicSetFlags *>( this );
+ }
+
+ bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign );
+ }
+
+ bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t inter_ref_pic_set_prediction_flag : 1;
+ uint32_t delta_rps_sign : 1;
+ };
+
+ struct H265ShortTermRefPicSet
+ {
+ using NativeType = StdVideoH265ShortTermRefPicSet;
+
+ operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265ShortTermRefPicSet *>( this );
+ }
+
+ operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265ShortTermRefPicSet *>( this );
+ }
+
+ bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) &&
+ ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) &&
+ ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) &&
+ ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) &&
+ ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) &&
+ ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 );
+ }
+
+ bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags = {};
+ uint32_t delta_idx_minus1 = {};
+ uint16_t use_delta_flag = {};
+ uint16_t abs_delta_rps_minus1 = {};
+ uint16_t used_by_curr_pic_flag = {};
+ uint16_t used_by_curr_pic_s0_flag = {};
+ uint16_t used_by_curr_pic_s1_flag = {};
+ uint16_t reserved1 = {};
+ uint8_t reserved2 = {};
+ uint8_t reserved3 = {};
+ uint8_t num_negative_pics = {};
+ uint8_t num_positive_pics = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_MAX_DPB_SIZE> delta_poc_s0_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_MAX_DPB_SIZE> delta_poc_s1_minus1 = {};
+ };
+
+ struct H265LongTermRefPicsSps
+ {
+ using NativeType = StdVideoH265LongTermRefPicsSps;
+
+ operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265LongTermRefPicsSps *>( this );
+ }
+
+ operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265LongTermRefPicsSps *>( this );
+ }
+
+ bool operator==( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps );
+ }
+
+ bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t used_by_curr_pic_lt_sps_flag = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS> lt_ref_pic_poc_lsb_sps = {};
+ };
+
+ struct H265SequenceParameterSet
+ {
+ using NativeType = StdVideoH265SequenceParameterSet;
+
+ operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265SequenceParameterSet *>( this );
+ }
+
+ operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265SequenceParameterSet *>( this );
+ }
+
+ bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) &&
+ ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+ ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) &&
+ ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) &&
+ ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) &&
+ ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) &&
+ ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) &&
+ ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) &&
+ ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) &&
+ ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) &&
+ ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) &&
+ ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) &&
+ ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) &&
+ ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) &&
+ ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) &&
+ ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) &&
+ ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) &&
+ ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) &&
+ ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) &&
+ ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) &&
+ ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) &&
+ ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) &&
+ ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) &&
+ ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) &&
+ ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries );
+ }
+
+ bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc =
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome;
+ uint32_t pic_width_in_luma_samples = {};
+ uint32_t pic_height_in_luma_samples = {};
+ uint8_t sps_video_parameter_set_id = {};
+ uint8_t sps_max_sub_layers_minus1 = {};
+ uint8_t sps_seq_parameter_set_id = {};
+ uint8_t bit_depth_luma_minus8 = {};
+ uint8_t bit_depth_chroma_minus8 = {};
+ uint8_t log2_max_pic_order_cnt_lsb_minus4 = {};
+ uint8_t log2_min_luma_coding_block_size_minus3 = {};
+ uint8_t log2_diff_max_min_luma_coding_block_size = {};
+ uint8_t log2_min_luma_transform_block_size_minus2 = {};
+ uint8_t log2_diff_max_min_luma_transform_block_size = {};
+ uint8_t max_transform_hierarchy_depth_inter = {};
+ uint8_t max_transform_hierarchy_depth_intra = {};
+ uint8_t num_short_term_ref_pic_sets = {};
+ uint8_t num_long_term_ref_pics_sps = {};
+ uint8_t pcm_sample_bit_depth_luma_minus1 = {};
+ uint8_t pcm_sample_bit_depth_chroma_minus1 = {};
+ uint8_t log2_min_pcm_luma_coding_block_size_minus3 = {};
+ uint8_t log2_diff_max_min_pcm_luma_coding_block_size = {};
+ uint8_t reserved1 = {};
+ uint8_t reserved2 = {};
+ uint8_t palette_max_size = {};
+ uint8_t delta_palette_max_predictor_size = {};
+ uint8_t motion_vector_resolution_control_idc = {};
+ uint8_t sps_num_palette_predictor_initializers_minus1 = {};
+ uint32_t conf_win_left_offset = {};
+ uint32_t conf_win_right_offset = {};
+ uint32_t conf_win_top_offset = {};
+ uint32_t conf_win_bottom_offset = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps * pLongTermRefPicsSps = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {};
+ };
+
+ struct H265PpsFlags
+ {
+ using NativeType = StdVideoH265PpsFlags;
+
+ operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265PpsFlags *>( this );
+ }
+
+ operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265PpsFlags *>( this );
+ }
+
+ bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) &&
+ ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) &&
+ ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) &&
+ ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) &&
+ ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) &&
+ ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) &&
+ ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) &&
+ ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) &&
+ ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) &&
+ ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) &&
+ ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) &&
+ ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) &&
+ ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) &&
+ ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) &&
+ ( lists_modification_present_flag == rhs.lists_modification_present_flag ) &&
+ ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) &&
+ ( pps_extension_present_flag == rhs.pps_extension_present_flag ) &&
+ ( cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) &&
+ ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) &&
+ ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) &&
+ ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) &&
+ ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) &&
+ ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) &&
+ ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag );
+ }
+
+ bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t dependent_slice_segments_enabled_flag : 1;
+ uint32_t output_flag_present_flag : 1;
+ uint32_t sign_data_hiding_enabled_flag : 1;
+ uint32_t cabac_init_present_flag : 1;
+ uint32_t constrained_intra_pred_flag : 1;
+ uint32_t transform_skip_enabled_flag : 1;
+ uint32_t cu_qp_delta_enabled_flag : 1;
+ uint32_t pps_slice_chroma_qp_offsets_present_flag : 1;
+ uint32_t weighted_pred_flag : 1;
+ uint32_t weighted_bipred_flag : 1;
+ uint32_t transquant_bypass_enabled_flag : 1;
+ uint32_t tiles_enabled_flag : 1;
+ uint32_t entropy_coding_sync_enabled_flag : 1;
+ uint32_t uniform_spacing_flag : 1;
+ uint32_t loop_filter_across_tiles_enabled_flag : 1;
+ uint32_t pps_loop_filter_across_slices_enabled_flag : 1;
+ uint32_t deblocking_filter_control_present_flag : 1;
+ uint32_t deblocking_filter_override_enabled_flag : 1;
+ uint32_t pps_deblocking_filter_disabled_flag : 1;
+ uint32_t pps_scaling_list_data_present_flag : 1;
+ uint32_t lists_modification_present_flag : 1;
+ uint32_t slice_segment_header_extension_present_flag : 1;
+ uint32_t pps_extension_present_flag : 1;
+ uint32_t cross_component_prediction_enabled_flag : 1;
+ uint32_t chroma_qp_offset_list_enabled_flag : 1;
+ uint32_t pps_curr_pic_ref_enabled_flag : 1;
+ uint32_t residual_adaptive_colour_transform_enabled_flag : 1;
+ uint32_t pps_slice_act_qp_offsets_present_flag : 1;
+ uint32_t pps_palette_predictor_initializers_present_flag : 1;
+ uint32_t monochrome_palette_flag : 1;
+ uint32_t pps_range_extension_flag : 1;
+ };
+
+ struct H265PictureParameterSet
+ {
+ using NativeType = StdVideoH265PictureParameterSet;
+
+ operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoH265PictureParameterSet *>( this );
+ }
+
+ operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoH265PictureParameterSet *>( this );
+ }
+
+ bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+ ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+ ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) &&
+ ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) &&
+ ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) &&
+ ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) &&
+ ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) &&
+ ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) &&
+ ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) &&
+ ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) &&
+ ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) &&
+ ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) &&
+ ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) &&
+ ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) &&
+ ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) &&
+ ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) &&
+ ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) &&
+ ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) &&
+ ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) &&
+ ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries );
+ }
+
+ bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags flags = {};
+ uint8_t pps_pic_parameter_set_id = {};
+ uint8_t pps_seq_parameter_set_id = {};
+ uint8_t sps_video_parameter_set_id = {};
+ uint8_t num_extra_slice_header_bits = {};
+ uint8_t num_ref_idx_l0_default_active_minus1 = {};
+ uint8_t num_ref_idx_l1_default_active_minus1 = {};
+ int8_t init_qp_minus26 = {};
+ uint8_t diff_cu_qp_delta_depth = {};
+ int8_t pps_cb_qp_offset = {};
+ int8_t pps_cr_qp_offset = {};
+ int8_t pps_beta_offset_div2 = {};
+ int8_t pps_tc_offset_div2 = {};
+ uint8_t log2_parallel_merge_level_minus2 = {};
+ uint8_t log2_max_transform_skip_block_size_minus2 = {};
+ uint8_t diff_cu_chroma_qp_offset_depth = {};
+ uint8_t chroma_qp_offset_list_len_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE> cb_qp_offset_list = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE> cr_qp_offset_list = {};
+ uint8_t log2_sao_offset_scale_luma = {};
+ uint8_t log2_sao_offset_scale_chroma = {};
+ int8_t pps_act_y_qp_offset_plus5 = {};
+ int8_t pps_act_cb_qp_offset_plus5 = {};
+ int8_t pps_act_cr_qp_offset_plus3 = {};
+ uint8_t pps_num_palette_predictor_initializers = {};
+ uint8_t luma_bit_depth_entry_minus8 = {};
+ uint8_t chroma_bit_depth_entry_minus8 = {};
+ uint8_t num_tile_columns_minus1 = {};
+ uint8_t num_tile_rows_minus1 = {};
+ uint8_t reserved1 = {};
+ uint8_t reserved2 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE> column_width_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE> row_height_minus1 = {};
+ uint32_t reserved3 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {};
+ };
+
+ //=== vulkan_video_codec_h265std_decode ===
+
+ struct DecodeH265PictureInfoFlags
+ {
+ using NativeType = StdVideoDecodeH265PictureInfoFlags;
+
+ operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH265PictureInfoFlags *>( this );
+ }
+
+ operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH265PictureInfoFlags *>( this );
+ }
+
+ bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) &&
+ ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag );
+ }
+
+ bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t IrapPicFlag : 1;
+ uint32_t IdrPicFlag : 1;
+ uint32_t IsReference : 1;
+ uint32_t short_term_ref_pic_set_sps_flag : 1;
+ };
+
+ struct DecodeH265PictureInfo
+ {
+ using NativeType = StdVideoDecodeH265PictureInfo;
+
+ operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH265PictureInfo *>( this );
+ }
+
+ operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH265PictureInfo *>( this );
+ }
+
+ bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+ ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+ ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) &&
+ ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) &&
+ ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) &&
+ ( RefPicSetLtCurr == rhs.RefPicSetLtCurr );
+ }
+
+ bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags flags = {};
+ uint8_t sps_video_parameter_set_id = {};
+ uint8_t pps_seq_parameter_set_id = {};
+ uint8_t pps_pic_parameter_set_id = {};
+ uint8_t NumDeltaPocsOfRefRpsIdx = {};
+ int32_t PicOrderCntVal = {};
+ uint16_t NumBitsForSTRefPicSetInSlice = {};
+ uint16_t reserved = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetStCurrBefore = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetStCurrAfter = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetLtCurr = {};
+ };
+
+ struct DecodeH265ReferenceInfoFlags
+ {
+ using NativeType = StdVideoDecodeH265ReferenceInfoFlags;
+
+ operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH265ReferenceInfoFlags *>( this );
+ }
+
+ operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH265ReferenceInfoFlags *>( this );
+ }
+
+ bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference );
+ }
+
+ bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t unused_for_reference : 1;
+ };
+
+ struct DecodeH265ReferenceInfo
+ {
+ using NativeType = StdVideoDecodeH265ReferenceInfo;
+
+ operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoDecodeH265ReferenceInfo *>( this );
+ }
+
+ operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoDecodeH265ReferenceInfo *>( this );
+ }
+
+ bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal );
+ }
+
+ bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags = {};
+ int32_t PicOrderCntVal = {};
+ };
+
+ //=== vulkan_video_codec_h265std_encode ===
+
+ struct EncodeH265WeightTableFlags
+ {
+ using NativeType = StdVideoEncodeH265WeightTableFlags;
+
+ operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265WeightTableFlags *>( this );
+ }
+
+ operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265WeightTableFlags *>( this );
+ }
+
+ bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) &&
+ ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag );
+ }
+
+ bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint16_t luma_weight_l0_flag = {};
+ uint16_t chroma_weight_l0_flag = {};
+ uint16_t luma_weight_l1_flag = {};
+ uint16_t chroma_weight_l1_flag = {};
+ };
+
+ struct EncodeH265WeightTable
+ {
+ using NativeType = StdVideoEncodeH265WeightTable;
+
+ operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265WeightTable *>( this );
+ }
+
+ operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265WeightTable *>( this );
+ }
+
+ bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) &&
+ ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) &&
+ ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) &&
+ ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) &&
+ ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) &&
+ ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 );
+ }
+
+ bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags flags = {};
+ uint8_t luma_log2_weight_denom = {};
+ int8_t delta_chroma_log2_weight_denom = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> delta_luma_weight_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> luma_offset_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_weight_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_offset_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> delta_luma_weight_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> luma_offset_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_weight_l1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_offset_l1 = {};
+ };
+
+ struct EncodeH265SliceSegmentHeaderFlags
+ {
+ using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags;
+
+ operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265SliceSegmentHeaderFlags *>( this );
+ }
+
+ operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265SliceSegmentHeaderFlags *>( this );
+ }
+
+ bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) &&
+ ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) &&
+ ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) &&
+ ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) &&
+ ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) &&
+ ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) &&
+ ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) &&
+ ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) &&
+ ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t first_slice_segment_in_pic_flag : 1;
+ uint32_t dependent_slice_segment_flag : 1;
+ uint32_t slice_sao_luma_flag : 1;
+ uint32_t slice_sao_chroma_flag : 1;
+ uint32_t num_ref_idx_active_override_flag : 1;
+ uint32_t mvd_l1_zero_flag : 1;
+ uint32_t cabac_init_flag : 1;
+ uint32_t cu_chroma_qp_offset_enabled_flag : 1;
+ uint32_t deblocking_filter_override_flag : 1;
+ uint32_t slice_deblocking_filter_disabled_flag : 1;
+ uint32_t collocated_from_l0_flag : 1;
+ uint32_t slice_loop_filter_across_slices_enabled_flag : 1;
+ uint32_t reserved : 20;
+ };
+
+ struct EncodeH265SliceSegmentHeader
+ {
+ using NativeType = StdVideoEncodeH265SliceSegmentHeader;
+
+ operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265SliceSegmentHeader *>( this );
+ }
+
+ operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265SliceSegmentHeader *>( this );
+ }
+
+ bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) &&
+ ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) &&
+ ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) &&
+ ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) &&
+ ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) &&
+ ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( reserved1 == rhs.reserved1 ) && ( pWeightTable == rhs.pWeightTable );
+ }
+
+ bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB;
+ uint32_t slice_segment_address = {};
+ uint8_t collocated_ref_idx = {};
+ uint8_t MaxNumMergeCand = {};
+ int8_t slice_cb_qp_offset = {};
+ int8_t slice_cr_qp_offset = {};
+ int8_t slice_beta_offset_div2 = {};
+ int8_t slice_tc_offset_div2 = {};
+ int8_t slice_act_y_qp_offset = {};
+ int8_t slice_act_cb_qp_offset = {};
+ int8_t slice_act_cr_qp_offset = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 3> reserved1 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {};
+ };
+
+ struct EncodeH265ReferenceListsInfoFlags
+ {
+ using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags;
+
+ operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265ReferenceListsInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265ReferenceListsInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) &&
+ ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t ref_pic_list_modification_flag_l0 : 1;
+ uint32_t ref_pic_list_modification_flag_l1 : 1;
+ uint32_t reserved : 30;
+ };
+
+ struct EncodeH265ReferenceListsInfo
+ {
+ using NativeType = StdVideoEncodeH265ReferenceListsInfo;
+
+ operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265ReferenceListsInfo *>( this );
+ }
+
+ operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265ReferenceListsInfo *>( this );
+ }
+
+ bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) &&
+ ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) &&
+ ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 );
+ }
+
+ bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags = {};
+ uint8_t num_ref_idx_l0_active_minus1 = {};
+ uint8_t num_ref_idx_l1_active_minus1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> RefPicList0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> RefPicList1 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> list_entry_l0 = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF> list_entry_l1 = {};
+ };
+
+ struct EncodeH265PictureInfoFlags
+ {
+ using NativeType = StdVideoEncodeH265PictureInfoFlags;
+
+ operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265PictureInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265PictureInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) &&
+ ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) &&
+ ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) &&
+ ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) &&
+ ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) &&
+ ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t is_reference : 1;
+ uint32_t IrapPicFlag : 1;
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t discardable_flag : 1;
+ uint32_t cross_layer_bla_flag : 1;
+ uint32_t pic_output_flag : 1;
+ uint32_t no_output_of_prior_pics_flag : 1;
+ uint32_t short_term_ref_pic_set_sps_flag : 1;
+ uint32_t slice_temporal_mvp_enabled_flag : 1;
+ uint32_t reserved : 23;
+ };
+
+ struct EncodeH265SliceSegmentLongTermRefPics
+ {
+ using NativeType = StdVideoEncodeH265SliceSegmentLongTermRefPics;
+
+ operator StdVideoEncodeH265SliceSegmentLongTermRefPics const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265SliceSegmentLongTermRefPics *>( this );
+ }
+
+ operator StdVideoEncodeH265SliceSegmentLongTermRefPics &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265SliceSegmentLongTermRefPics *>( this );
+ }
+
+ bool operator==( EncodeH265SliceSegmentLongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) &&
+ ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) &&
+ ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt );
+ }
+
+ bool operator!=( EncodeH265SliceSegmentLongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint8_t num_long_term_sps = {};
+ uint8_t num_long_term_pics = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS> lt_idx_sps = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_LONG_TERM_PICS> poc_lsb_lt = {};
+ uint16_t used_by_curr_pic_lt_flag = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_DELTA_POC> delta_poc_msb_present_flag = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_DELTA_POC> delta_poc_msb_cycle_lt = {};
+ };
+
+ struct EncodeH265PictureInfo
+ {
+ using NativeType = StdVideoEncodeH265PictureInfo;
+
+ operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265PictureInfo *>( this );
+ }
+
+ operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265PictureInfo *>( this );
+ }
+
+ bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+ ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+ ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) &&
+ ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) &&
+ ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics );
+ }
+
+ bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP;
+ uint8_t sps_video_parameter_set_id = {};
+ uint8_t pps_seq_parameter_set_id = {};
+ uint8_t pps_pic_parameter_set_id = {};
+ uint8_t short_term_ref_pic_set_idx = {};
+ int32_t PicOrderCntVal = {};
+ uint8_t TemporalId = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 7> reserved1 = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {};
+ const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentLongTermRefPics * pLongTermRefPics = {};
+ };
+
+ struct EncodeH265ReferenceInfoFlags
+ {
+ using NativeType = StdVideoEncodeH265ReferenceInfoFlags;
+
+ operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265ReferenceInfoFlags *>( this );
+ }
+
+ operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265ReferenceInfoFlags *>( this );
+ }
+
+ bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) &&
+ ( reserved == rhs.reserved );
+ }
+
+ bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t unused_for_reference : 1;
+ uint32_t reserved : 30;
+ };
+
+ struct EncodeH265ReferenceInfo
+ {
+ using NativeType = StdVideoEncodeH265ReferenceInfo;
+
+ operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const StdVideoEncodeH265ReferenceInfo *>( this );
+ }
+
+ operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<StdVideoEncodeH265ReferenceInfo *>( this );
+ }
+
+ bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId );
+ }
+
+ bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP;
+ int32_t PicOrderCntVal = {};
+ uint8_t TemporalId = {};
+ };
+
+ } // namespace VULKAN_HPP_VIDEO_NAMESPACE
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
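
A minimal usage sketch for the decode-side wrappers added above, assuming the default namespace macros (VULKAN_HPP_NAMESPACE = vk, VULKAN_HPP_VIDEO_NAMESPACE = video) and a VK_KHR_video_decode_h265-capable build; VideoDecodeH265PictureInfoKHR lives in vulkan_structs.hpp, not in this header, and is referenced here only to show where the converted std-video pointer is consumed.

    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_video.hpp>

    int main()
    {
      // Fill the C++ wrapper; members are value-initialized by the defaults shown above.
      vk::video::DecodeH265PictureInfo pictureInfo{};
      pictureInfo.flags.IdrPicFlag           = 1;
      pictureInfo.sps_video_parameter_set_id = 0;
      pictureInfo.pps_seq_parameter_set_id   = 0;
      pictureInfo.pps_pic_parameter_set_id   = 0;
      pictureInfo.PicOrderCntVal             = 0;
      pictureInfo.RefPicSetStCurrBefore[0]   = 0xff;  // ArrayWrapper1D indexes like std::array

      // The conversion operator yields the C std-video type expected by the decode extension.
      StdVideoDecodeH265PictureInfo const & stdInfo = pictureInfo;

      vk::VideoDecodeH265PictureInfoKHR decodeInfo{};
      decodeInfo.pStdPictureInfo = &stdInfo;

      // operator== compares member-wise, including the ArrayWrapper1D members.
      vk::video::DecodeH265ReferenceInfo a{}, b{};
      return ( a == b ) ? 0 : 1;
    }

The wrappers are intended to be layout-compatible with the corresponding StdVideo* structs (hence the reinterpret_cast-based conversion operators), so binding a reference through the conversion is equivalent to reinterpreting the wrapper's own address.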