author    Jon Leech <[email protected]>  2023-07-28 04:40:18 -0700
committer Jon Leech <[email protected]>  2023-07-28 04:50:00 -0700
commit    94bb3c998b9156b9101421f7614617dfcf7f4256
tree      f0e26d08fae2f2be5d61ebd94d3e7b0536a7d3e2
parent    cb7b123f2ddc04b86fd106c3a2b2e9872e8215b5
Update for Vulkan-Docs 1.3.260 (tag: v1.3.260)
Diffstat (limited to 'include')
-rw-r--r--  include/vulkan/vulkan.cppm                       135
-rw-r--r--  include/vulkan/vulkan.hpp                        414
-rw-r--r--  include/vulkan/vulkan_beta.h                     113
-rw-r--r--  include/vulkan/vulkan_core.h                     222
-rw-r--r--  include/vulkan/vulkan_enums.hpp                  193
-rw-r--r--  include/vulkan/vulkan_extension_inspection.hpp    20
-rw-r--r--  include/vulkan/vulkan_format_traits.hpp           54
-rw-r--r--  include/vulkan/vulkan_funcs.hpp                  465
-rw-r--r--  include/vulkan/vulkan_handles.hpp                214
-rw-r--r--  include/vulkan/vulkan_hash.hpp                   247
-rw-r--r--  include/vulkan/vulkan_raii.hpp                   383
-rw-r--r--  include/vulkan/vulkan_static_assertions.hpp      118
-rw-r--r--  include/vulkan/vulkan_structs.hpp               2054
-rw-r--r--  include/vulkan/vulkan_to_string.hpp              252
14 files changed, 4454 insertions, 430 deletions
diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm
index e5bd813..cc693b8 100644
--- a/include/vulkan/vulkan.cppm
+++ b/include/vulkan/vulkan.cppm
@@ -800,6 +800,12 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV;
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV;
+ //=== VK_KHR_maintenance5 ===
+ using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR;
+ using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR;
+ using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR;
+ using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR;
+
//=== VK_EXT_shader_object ===
using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT;
using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT;
@@ -888,50 +894,99 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::ResultValue;
using VULKAN_HPP_NAMESPACE::ResultValueType;
- //=========================================
- //=== CONSTEXPR CONSTANTs AND FUNCTIONs ===
- //=========================================
- using VULKAN_HPP_NAMESPACE::ApiVersion;
- using VULKAN_HPP_NAMESPACE::ApiVersion10;
- using VULKAN_HPP_NAMESPACE::ApiVersion11;
- using VULKAN_HPP_NAMESPACE::ApiVersion12;
- using VULKAN_HPP_NAMESPACE::ApiVersion13;
- using VULKAN_HPP_NAMESPACE::apiVersionMajor;
- using VULKAN_HPP_NAMESPACE::apiVersionMinor;
- using VULKAN_HPP_NAMESPACE::apiVersionPatch;
- using VULKAN_HPP_NAMESPACE::apiVersionVariant;
+ //===========================
+ //=== CONSTEXPR CONSTANTs ===
+ //===========================
+
+ //=== VK_VERSION_1_0 ===
using VULKAN_HPP_NAMESPACE::AttachmentUnused;
using VULKAN_HPP_NAMESPACE::False;
- using VULKAN_HPP_NAMESPACE::HeaderVersion;
- using VULKAN_HPP_NAMESPACE::HeaderVersionComplete;
using VULKAN_HPP_NAMESPACE::LodClampNone;
- using VULKAN_HPP_NAMESPACE::LuidSize;
- using VULKAN_HPP_NAMESPACE::makeApiVersion;
- using VULKAN_HPP_NAMESPACE::makeVersion;
using VULKAN_HPP_NAMESPACE::MaxDescriptionSize;
- using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize;
- using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize;
- using VULKAN_HPP_NAMESPACE::MaxDriverNameSize;
using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize;
- using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr;
using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps;
using VULKAN_HPP_NAMESPACE::MaxMemoryTypes;
using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize;
- using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt;
- using VULKAN_HPP_NAMESPACE::QueueFamilyExternal;
- using VULKAN_HPP_NAMESPACE::QueueFamilyForeignExt;
using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored;
- using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt;
using VULKAN_HPP_NAMESPACE::RemainingArrayLayers;
using VULKAN_HPP_NAMESPACE::RemainingMipLevels;
- using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr;
using VULKAN_HPP_NAMESPACE::SubpassExternal;
using VULKAN_HPP_NAMESPACE::True;
using VULKAN_HPP_NAMESPACE::UuidSize;
+ using VULKAN_HPP_NAMESPACE::WholeSize;
+
+ //=== VK_VERSION_1_1 ===
+ using VULKAN_HPP_NAMESPACE::LuidSize;
+ using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize;
+ using VULKAN_HPP_NAMESPACE::QueueFamilyExternal;
+
+ //=== VK_VERSION_1_2 ===
+ using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize;
+ using VULKAN_HPP_NAMESPACE::MaxDriverNameSize;
+
+ //=== VK_KHR_device_group_creation ===
+ using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKhr;
+
+ //=== VK_KHR_external_memory_capabilities ===
+ using VULKAN_HPP_NAMESPACE::LuidSizeKhr;
+
+ //=== VK_KHR_external_memory ===
+ using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKhr;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAmdx;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_KHR_ray_tracing_pipeline ===
+ using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr;
+
+ //=== VK_NV_ray_tracing ===
+ using VULKAN_HPP_NAMESPACE::ShaderUnusedNv;
+
+ //=== VK_KHR_global_priority ===
+ using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr;
+
+ //=== VK_KHR_driver_properties ===
+ using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKhr;
+ using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKhr;
+
+ //=== VK_EXT_global_priority_query ===
+ using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeExt;
+
+ //=== VK_EXT_image_sliced_view_of_3d ===
+ using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt;
+
+ //=== VK_EXT_shader_module_identifier ===
+ using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt;
+
+ //========================
+ //=== CONSTEXPR VALUEs ===
+ //========================
+ using VULKAN_HPP_NAMESPACE::HeaderVersion;
+
+ //=========================
+ //=== CONSTEXPR CALLEEs ===
+ //=========================
+ using VULKAN_HPP_NAMESPACE::apiVersionMajor;
+ using VULKAN_HPP_NAMESPACE::apiVersionMinor;
+ using VULKAN_HPP_NAMESPACE::apiVersionPatch;
+ using VULKAN_HPP_NAMESPACE::apiVersionVariant;
+ using VULKAN_HPP_NAMESPACE::makeApiVersion;
+ using VULKAN_HPP_NAMESPACE::makeVersion;
using VULKAN_HPP_NAMESPACE::versionMajor;
using VULKAN_HPP_NAMESPACE::versionMinor;
using VULKAN_HPP_NAMESPACE::versionPatch;
- using VULKAN_HPP_NAMESPACE::WholeSize;
+
+  //=========================
+  //=== CONSTEXPR CALLERs ===
+  //=========================
+ using VULKAN_HPP_NAMESPACE::ApiVersion;
+ using VULKAN_HPP_NAMESPACE::ApiVersion10;
+ using VULKAN_HPP_NAMESPACE::ApiVersion11;
+ using VULKAN_HPP_NAMESPACE::ApiVersion12;
+ using VULKAN_HPP_NAMESPACE::ApiVersion13;
+ using VULKAN_HPP_NAMESPACE::HeaderVersionComplete;
//===============
//=== STRUCTs ===
@@ -1752,6 +1807,18 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX;
+ using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX;
+ using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX;
+ using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX;
+ using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX;
+ using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT;
using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT;
@@ -2070,13 +2137,11 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT;
using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT;
using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT;
- using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT;
using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT;
using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT;
- using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
//=== VK_KHR_map_memory2 ===
using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
@@ -2521,6 +2586,18 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_pipeline_protected_access ===
using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ //=== VK_KHR_maintenance5 ===
+ using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR;
+ using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR;
+ using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
+ using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR;
+ using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR;
+ using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR;
+ using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
+ using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR;
+
//=== VK_KHR_ray_tracing_position_fetch ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
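
For context, a minimal sketch of how the VK_KHR_maintenance5 types exported above can be consumed through the C++ module. This is illustrative code rather than part of the commit; it assumes the module is built under its exported name vulkan_hpp, the default vk namespace, and the generator's usual enumerant naming (eVertexBuffer for VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR, and so on).

// Illustrative sketch -- not part of this commit.
import vulkan_hpp;

int main()
{
  // The new 64-bit flag types compose like their 32-bit counterparts.
  vk::BufferUsageFlags2KHR usage =
    vk::BufferUsageFlagBits2KHR::eVertexBuffer | vk::BufferUsageFlagBits2KHR::eTransferDst;

  // BufferUsageFlags2CreateInfoKHR chains into BufferCreateInfo via pNext;
  // when present, it takes the place of the legacy 32-bit usage field.
  vk::BufferUsageFlags2CreateInfoKHR usage2( usage );
  vk::BufferCreateInfo bufferInfo{};
  bufferInfo.size  = 65536;
  bufferInfo.pNext = &usage2;
  return 0;
}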
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 94968e9..8f46fc8 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 259, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 260, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -3991,6 +3991,59 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device,
+ VkPipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
+ const VkAllocationCallbacks * pAllocator,
+ VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ }
+
+ VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device,
+ VkPipeline executionGraph,
+ VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo );
+ }
+
+ VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device,
+ VkPipeline executionGraph,
+ const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
+ uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex );
+ }
+
+ void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch );
+ }
+
+ void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch,
+ const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo );
+ }
+
+ void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch,
+ const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo );
+ }
+
+ void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo );
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
@@ -4937,8 +4990,8 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetImageSubresourceLayout2EXT( VkDevice device,
VkImage image,
- const VkImageSubresource2EXT * pSubresource,
- VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
+ const VkImageSubresource2KHR * pSubresource,
+ VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
}
@@ -5964,6 +6017,36 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
}
+ //=== VK_KHR_maintenance5 ===
+
+ void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType );
+ }
+
+ void vkGetRenderingAreaGranularityKHR( VkDevice device,
+ const VkRenderingAreaInfoKHR * pRenderingAreaInfo,
+ VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity );
+ }
+
+ void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device,
+ const VkDeviceImageSubresourceInfoKHR * pInfo,
+ VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout );
+ }
+
+ void vkGetImageSubresourceLayout2KHR( VkDevice device,
+ VkImage image,
+ const VkImageSubresource2KHR * pSubresource,
+ VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout );
+ }
+
//=== VK_EXT_shader_object ===
VkResult vkCreateShadersEXT( VkDevice device,
@@ -6920,35 +7003,80 @@ namespace VULKAN_HPP_NAMESPACE
#endif
}
- //=========================================
- //=== CONSTEXPR CONSTANTs AND FUNCTIONs ===
- //=========================================
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE;
- VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE;
+ //===========================
+ //=== CONSTEXPR CONSTANTs ===
+ //===========================
+
+ //=== VK_VERSION_1_0 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE;
+ VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS;
+
+ //=== VK_VERSION_1_1 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL;
+
+ //=== VK_VERSION_1_2 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
+
+ //=== VK_KHR_device_group_creation ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKhr = VK_MAX_DEVICE_GROUP_SIZE_KHR;
+
+ //=== VK_KHR_external_memory_capabilities ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKhr = VK_LUID_SIZE_KHR;
+
+ //=== VK_KHR_external_memory ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKhr = VK_QUEUE_FAMILY_EXTERNAL_KHR;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAmdx = VK_SHADER_INDEX_UNUSED_AMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_KHR_ray_tracing_pipeline ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
+
+ //=== VK_NV_ray_tracing ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNv = VK_SHADER_UNUSED_NV;
+
+ //=== VK_KHR_global_priority ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
+
+ //=== VK_KHR_driver_properties ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKhr = VK_MAX_DRIVER_NAME_SIZE_KHR;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKhr = VK_MAX_DRIVER_INFO_SIZE_KHR;
+
+ //=== VK_EXT_global_priority_query ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeExt = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT;
+
+ //=== VK_EXT_image_sliced_view_of_3d ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT;
+
+ //=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE;
- VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION;
+
+ //========================
+ //=== CONSTEXPR VALUEs ===
+ //========================
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION;
+
+ //=========================
+ //=== CONSTEXPR CALLEEs ===
+ //=========================
template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version )
{
@@ -6998,6 +7126,10 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( ( uint32_t )(version)&0xFFFU );
}
+
+ //=========================
+ //=== CONSTEXPR CALLERs ===
+ //=========================
VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 );
VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 );
VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 );
@@ -7932,6 +8064,16 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct StructExtends<PipelineCreationFeedbackCreateInfo, ExecutionGraphPipelineCreateInfoAMDX>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <>
struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, PhysicalDeviceFeatures2>
{
@@ -9375,6 +9517,42 @@ namespace VULKAN_HPP_NAMESPACE
};
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ template <>
+ struct StructExtends<PhysicalDeviceShaderEnqueueFeaturesAMDX, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderEnqueueFeaturesAMDX, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderEnqueuePropertiesAMDX, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineShaderStageNodeCreateInfoAMDX, PipelineShaderStageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
template <>
struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>
@@ -9820,6 +9998,16 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct StructExtends<PipelineCompilerControlCreateInfoAMD, ExecutionGraphPipelineCreateInfoAMDX>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_AMD_shader_core_properties ===
template <>
@@ -10679,7 +10867,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<SubresourceHostMemcpySizeEXT, SubresourceLayout2EXT>
+ struct StructExtends<SubresourceHostMemcpySizeEXT, SubresourceLayout2KHR>
{
enum
{
@@ -11755,7 +11943,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2EXT>
+ struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2KHR>
{
enum
{
@@ -12941,6 +13129,96 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_KHR_maintenance5 ===
+ template <>
+ struct StructExtends<PhysicalDeviceMaintenance5FeaturesKHR, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMaintenance5FeaturesKHR, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMaintenance5PropertiesKHR, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineCreateFlags2CreateInfoKHR, ComputePipelineCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineCreateFlags2CreateInfoKHR, GraphicsPipelineCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineCreateFlags2CreateInfoKHR, RayTracingPipelineCreateInfoNV>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineCreateFlags2CreateInfoKHR, RayTracingPipelineCreateInfoKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<BufferUsageFlags2CreateInfoKHR, BufferViewCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<BufferUsageFlags2CreateInfoKHR, BufferCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<BufferUsageFlags2CreateInfoKHR, PhysicalDeviceExternalBufferInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<BufferUsageFlags2CreateInfoKHR, DescriptorBufferBindingInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_KHR_ray_tracing_position_fetch ===
template <>
struct StructExtends<PhysicalDeviceRayTracingPositionFetchFeaturesKHR, PhysicalDeviceFeatures2>
@@ -13966,6 +14244,25 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
+ PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
+ PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
+ PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
+ PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
+ PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
+ PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
+#else
+ PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
+ PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
+ PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
+ PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
@@ -14450,6 +14747,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+ //=== VK_KHR_maintenance5 ===
+ PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
+ PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
+ PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
+ PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
+
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
@@ -15145,6 +15448,20 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) );
+ vkGetExecutionGraphPipelineScratchSizeAMDX =
+ PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
+ vkGetExecutionGraphPipelineNodeIndexAMDX =
+ PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
+ vkCmdInitializeGraphScratchMemoryAMDX =
+ PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
+ vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) );
+ vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) );
+ vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
vkGetPhysicalDeviceMultisamplePropertiesEXT =
@@ -15449,6 +15766,8 @@ namespace VULKAN_HPP_NAMESPACE
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) );
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) );
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+ if ( !vkGetImageSubresourceLayout2KHR )
+ vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) );
@@ -15769,6 +16088,13 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+ //=== VK_KHR_maintenance5 ===
+ vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) );
+ vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) );
+ vkGetDeviceImageSubresourceLayoutKHR =
+ PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) );
+ vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) );
+
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
@@ -16202,6 +16528,20 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
+ vkGetExecutionGraphPipelineScratchSizeAMDX =
+ PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
+ vkGetExecutionGraphPipelineNodeIndexAMDX =
+ PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
+ vkCmdInitializeGraphScratchMemoryAMDX =
+ PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
+ vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
+ vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
+ vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
@@ -16466,6 +16806,8 @@ namespace VULKAN_HPP_NAMESPACE
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+ if ( !vkGetImageSubresourceLayout2KHR )
+ vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
@@ -16754,6 +17096,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+ //=== VK_KHR_maintenance5 ===
+ vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
+ vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
+ vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
+ vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
+
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
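
For orientation, the dispatcher additions above reduce to ordinary vkGetDeviceProcAddr lookups. A hedged sketch of doing the same by hand for one VK_KHR_maintenance5 entry point, assuming `device` is a VkDevice created with the extension enabled (setup not shown):

#include <vulkan/vulkan_core.h>

// Hypothetical helper: render-area granularity for one color attachment of
// the given format, via the new vkGetRenderingAreaGranularityKHR. Mirrors
// what the generated DispatchLoaderDynamic code above does.
VkExtent2D renderingAreaGranularity( VkDevice device, VkFormat colorFormat )
{
  auto fp = reinterpret_cast<PFN_vkGetRenderingAreaGranularityKHR>(
    vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );

  VkRenderingAreaInfoKHR info{};
  info.sType                   = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR;
  info.colorAttachmentCount    = 1;
  info.pColorAttachmentFormats = &colorFormat;
  info.depthAttachmentFormat   = VK_FORMAT_UNDEFINED;
  info.stencilAttachmentFormat = VK_FORMAT_UNDEFINED;

  VkExtent2D granularity{};
  fp( device, &info, &granularity );
  return granularity;
}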
diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h
index 75fabd4..f00ed3f 100644
--- a/include/vulkan/vulkan_beta.h
+++ b/include/vulkan/vulkan_beta.h
@@ -643,6 +643,119 @@ typedef struct VkVideoEncodeH265GopRemainingFrameInfoEXT {
+// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls.
+#define VK_AMDX_shader_enqueue 1
+#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1
+#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue"
+#define VK_SHADER_INDEX_UNUSED_AMDX (~0U)
+typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderEnqueue;
+} VkPhysicalDeviceShaderEnqueueFeaturesAMDX;
+
+typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxExecutionGraphDepth;
+ uint32_t maxExecutionGraphShaderOutputNodes;
+ uint32_t maxExecutionGraphShaderPayloadSize;
+ uint32_t maxExecutionGraphShaderPayloadCount;
+ uint32_t executionGraphDispatchAddressAlignment;
+} VkPhysicalDeviceShaderEnqueuePropertiesAMDX;
+
+typedef struct VkExecutionGraphPipelineScratchSizeAMDX {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceSize size;
+} VkExecutionGraphPipelineScratchSizeAMDX;
+
+typedef struct VkExecutionGraphPipelineCreateInfoAMDX {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineCreateFlags flags;
+ uint32_t stageCount;
+ const VkPipelineShaderStageCreateInfo* pStages;
+ const VkPipelineLibraryCreateInfoKHR* pLibraryInfo;
+ VkPipelineLayout layout;
+ VkPipeline basePipelineHandle;
+ int32_t basePipelineIndex;
+} VkExecutionGraphPipelineCreateInfoAMDX;
+
+typedef union VkDeviceOrHostAddressConstAMDX {
+ VkDeviceAddress deviceAddress;
+ const void* hostAddress;
+} VkDeviceOrHostAddressConstAMDX;
+
+typedef struct VkDispatchGraphInfoAMDX {
+ uint32_t nodeIndex;
+ uint32_t payloadCount;
+ VkDeviceOrHostAddressConstAMDX payloads;
+ uint64_t payloadStride;
+} VkDispatchGraphInfoAMDX;
+
+typedef struct VkDispatchGraphCountInfoAMDX {
+ uint32_t count;
+ VkDeviceOrHostAddressConstAMDX infos;
+ uint64_t stride;
+} VkDispatchGraphCountInfoAMDX;
+
+typedef struct VkPipelineShaderStageNodeCreateInfoAMDX {
+ VkStructureType sType;
+ const void* pNext;
+ const char* pName;
+ uint32_t index;
+} VkPipelineShaderStageNodeCreateInfoAMDX;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex);
+typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX(
+ VkDevice device,
+ VkPipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos,
+ const VkAllocationCallbacks* pAllocator,
+ VkPipeline* pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX(
+ VkDevice device,
+ VkPipeline executionGraph,
+ VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX(
+ VkDevice device,
+ VkPipeline executionGraph,
+ const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo,
+ uint32_t* pNodeIndex);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX(
+ VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX(
+ VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch,
+ const VkDispatchGraphCountInfoAMDX* pCountInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX(
+ VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch,
+ const VkDispatchGraphCountInfoAMDX* pCountInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX(
+ VkCommandBuffer commandBuffer,
+ VkDeviceAddress scratch,
+ VkDeviceAddress countInfo);
+#endif
+
+
// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_displacement_micromap 1
#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2
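
The VK_AMDX_shader_enqueue block above declares the execution-graph API end to end. The following is a sketch of the intended call sequence, with heavy caveats: the extension is provisional (VK_ENABLE_BETA_EXTENSIONS must be defined), and `device`, `cmd`, `pipeline`, and the `scratch` device address are assumed to be created elsewhere, with VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX on the scratch buffer.

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>

// Hypothetical sketch: query the graph's scratch requirement, initialize the
// scratch memory once, then record a single-node dispatch with no payloads.
void dispatchGraphOnce( VkDevice device, VkCommandBuffer cmd, VkPipeline pipeline,
                        VkDeviceAddress scratch, uint32_t nodeIndex )
{
  VkExecutionGraphPipelineScratchSizeAMDX sizeInfo{};
  sizeInfo.sType = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX;
  vkGetExecutionGraphPipelineScratchSizeAMDX( device, pipeline, &sizeInfo );
  // sizeInfo.size is the minimum size of the buffer backing `scratch`.

  vkCmdInitializeGraphScratchMemoryAMDX( cmd, scratch );

  VkDispatchGraphInfoAMDX node{};
  node.nodeIndex            = nodeIndex;
  node.payloadCount         = 0;        // no payload data in this sketch
  node.payloads.hostAddress = nullptr;
  node.payloadStride        = 0;

  VkDispatchGraphCountInfoAMDX countInfo{};
  countInfo.count             = 1;
  countInfo.infos.hostAddress = &node;
  countInfo.stride            = sizeof( VkDispatchGraphInfoAMDX );
  vkCmdDispatchGraphAMDX( cmd, scratch, &countInfo );
}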
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index 50a257b..78b0da7 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 259
+#define VK_HEADER_VERSION 260
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -646,6 +646,21 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004,
VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005,
VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004,
+#endif
VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000,
VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001,
VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002,
@@ -937,8 +952,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000,
VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001,
- VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002,
- VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003,
VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
@@ -1065,6 +1078,14 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = 1000470000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = 1000470001,
+ VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = 1000470003,
+ VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = 1000470004,
+ VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = 1000338002,
+ VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003,
+ VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005,
+ VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001,
@@ -1255,6 +1276,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+ VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR,
+ VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
@@ -1652,6 +1675,8 @@ typedef enum VkFormat {
VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
VK_FORMAT_R16G16_S10_5_NV = 1000464000,
+ VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000,
+ VK_FORMAT_A8_UNORM_KHR = 1000470001,
VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
@@ -2111,6 +2136,9 @@ typedef enum VkAttachmentStoreOp {
typedef enum VkPipelineBindPoint {
VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000,
+#endif
VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000,
VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003,
VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
@@ -2499,6 +2527,9 @@ typedef enum VkBufferUsageFlagBits {
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000,
+#endif
VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000,
VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000,
VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400,
@@ -10399,6 +10430,171 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR(
#endif
+// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls.
+#define VK_KHR_maintenance5 1
+#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1
+#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5"
+typedef VkFlags64 VkPipelineCreateFlags2KHR;
+
+// Flag bits for VkPipelineCreateFlagBits2KHR
+typedef VkFlags64 VkPipelineCreateFlagBits2KHR;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV = 0x10000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR = 0x00000020ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR = 0x00000400ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR = 0x00800000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR = 0x00040000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR = 0x00100000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR = 0x00400000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR = 0x01000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x02000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x04000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR = 0x08000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR = 0x40000000ULL;
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR = 0x20000000ULL;
+
+typedef VkFlags64 VkBufferUsageFlags2KHR;
+
+// Flag bits for VkBufferUsageFlagBits2KHR
+typedef VkFlags64 VkBufferUsageFlagBits2KHR;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR = 0x00000200ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR = 0x00000400ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR = 0x00000800ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR = 0x00001000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL;
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL;
+#endif
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR = 0x00200000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR = 0x00400000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR = 0x04000000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00800000ULL;
+static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR = 0x01000000ULL;
+
+typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 maintenance5;
+} VkPhysicalDeviceMaintenance5FeaturesKHR;
+
+typedef struct VkPhysicalDeviceMaintenance5PropertiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting;
+ VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting;
+ VkBool32 depthStencilSwizzleOneSupport;
+ VkBool32 polygonModePointSize;
+ VkBool32 nonStrictSinglePixelWideLinesUseParallelogram;
+ VkBool32 nonStrictWideLinesUseParallelogram;
+} VkPhysicalDeviceMaintenance5PropertiesKHR;
+
+typedef struct VkRenderingAreaInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t viewMask;
+ uint32_t colorAttachmentCount;
+ const VkFormat* pColorAttachmentFormats;
+ VkFormat depthAttachmentFormat;
+ VkFormat stencilAttachmentFormat;
+} VkRenderingAreaInfoKHR;
+
+typedef struct VkImageSubresource2KHR {
+ VkStructureType sType;
+ void* pNext;
+ VkImageSubresource imageSubresource;
+} VkImageSubresource2KHR;
+
+typedef struct VkDeviceImageSubresourceInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ const VkImageCreateInfo* pCreateInfo;
+ const VkImageSubresource2KHR* pSubresource;
+} VkDeviceImageSubresourceInfoKHR;
+
+typedef struct VkSubresourceLayout2KHR {
+ VkStructureType sType;
+ void* pNext;
+ VkSubresourceLayout subresourceLayout;
+} VkSubresourceLayout2KHR;
+
+typedef struct VkPipelineCreateFlags2CreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineCreateFlags2KHR flags;
+} VkPipelineCreateFlags2CreateInfoKHR;
+
+typedef struct VkBufferUsageFlags2CreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkBufferUsageFlags2KHR usage;
+} VkBufferUsageFlags2CreateInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType);
+typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2KHR* pLayout);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR(
+ VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ VkDeviceSize size,
+ VkIndexType indexType);
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR(
+ VkDevice device,
+ const VkRenderingAreaInfoKHR* pRenderingAreaInfo,
+ VkExtent2D* pGranularity);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR(
+ VkDevice device,
+ const VkDeviceImageSubresourceInfoKHR* pInfo,
+ VkSubresourceLayout2KHR* pLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR(
+ VkDevice device,
+ VkImage image,
+ const VkImageSubresource2KHR* pSubresource,
+ VkSubresourceLayout2KHR* pLayout);
+#endif
+
+
// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_ray_tracing_position_fetch 1
#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1
@@ -13993,23 +14189,15 @@ typedef struct VkHostImageCopyDevicePerformanceQueryEXT {
VkBool32 identicalMemoryLayout;
} VkHostImageCopyDevicePerformanceQueryEXT;
-typedef struct VkSubresourceLayout2EXT {
- VkStructureType sType;
- void* pNext;
- VkSubresourceLayout subresourceLayout;
-} VkSubresourceLayout2EXT;
+typedef VkSubresourceLayout2KHR VkSubresourceLayout2EXT;
-typedef struct VkImageSubresource2EXT {
- VkStructureType sType;
- void* pNext;
- VkImageSubresource imageSubresource;
-} VkImageSubresource2EXT;
+typedef VkImageSubresource2KHR VkImageSubresource2EXT;
typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo);
typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo);
typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo);
typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions);
-typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT(
@@ -14032,8 +14220,8 @@ VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT(
VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
VkDevice device,
VkImage image,
- const VkImageSubresource2EXT* pSubresource,
- VkSubresourceLayout2EXT* pLayout);
+ const VkImageSubresource2KHR* pSubresource,
+ VkSubresourceLayout2KHR* pLayout);
#endif
@@ -14945,7 +15133,7 @@ typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
typedef struct VkGraphicsPipelineLibraryCreateInfoEXT {
VkStructureType sType;
- void* pNext;
+ const void* pNext;
VkGraphicsPipelineLibraryFlagsEXT flags;
} VkGraphicsPipelineLibraryCreateInfoEXT;
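The pNext constness fix above lets a pointer-to-const struct be chained without a cast; a hypothetical chain, for illustration only:

    #include <vulkan/vulkan.h>

    void chainConstStruct( const VkPipelineLibraryCreateInfoKHR * libraryInfo )
    {
        VkGraphicsPipelineLibraryCreateInfoEXT info = {};
        info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT;
        info.pNext = libraryInfo;  // previously required a const_cast
        info.flags = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT;
        (void)info;
    }
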
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
index 27cb036..effa298 100644
--- a/include/vulkan/vulkan_enums.hpp
+++ b/include/vulkan/vulkan_enums.hpp
@@ -577,8 +577,15 @@ namespace VULKAN_HPP_NAMESPACE
eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
- eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+ ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+ eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX,
+ ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX,
+ eExecutionGraphPipelineScratchSizeAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX,
+ eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX,
+ ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
@@ -1090,6 +1097,14 @@ namespace VULKAN_HPP_NAMESPACE
eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
+ ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR,
+ ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR,
+ eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR,
+ eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR,
+ eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR,
+ eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR,
+ ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR,
+ eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR,
ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR,
ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT,
@@ -1500,7 +1515,9 @@ namespace VULKAN_HPP_NAMESPACE
eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
- eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV
+ eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
+ eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR,
+ eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR
};
enum class FormatFeatureFlagBits : VkFormatFeatureFlags
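The two new KHR formats can be probed like any other; a sketch using the C++ bindings:

    #include <vulkan/vulkan.hpp>

    bool supportsA8Sampling( vk::PhysicalDevice physicalDevice )
    {
        // Query optimal-tiling support for sampling the new A8 format
        // before relying on it.
        vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eA8UnormKHR );
        return static_cast<bool>( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage );
    }
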
@@ -2115,21 +2132,24 @@ namespace VULKAN_HPP_NAMESPACE
enum class BufferUsageFlagBits : VkBufferUsageFlags
{
- eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
- eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
- eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
- eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
- eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
- eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
- eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
- eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
- eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
- eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
- eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
- eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
- eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
- eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
- eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+ eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+ eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+ eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+ eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+ eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+ eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+ eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+ eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+ eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+ eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+ eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+ eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
@@ -2158,8 +2178,11 @@ namespace VULKAN_HPP_NAMESPACE
BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer |
BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress |
BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT |
- BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT |
- BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR |
+ BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ | BufferUsageFlagBits::eExecutionGraphScratchAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR |
BufferUsageFlagBits::eShaderBindingTableKHR
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR
@@ -3046,8 +3069,11 @@ namespace VULKAN_HPP_NAMESPACE
enum class PipelineBindPoint
{
- eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
- eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+ eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+ eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI
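With beta extensions enabled, the new bind point is used exactly like the existing ones; a minimal sketch:

    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.hpp>

    void bindExecutionGraph( vk::CommandBuffer cmd, vk::Pipeline executionGraph )
    {
        // Same mechanism as eGraphics / eCompute, just a new bind point.
        cmd.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, executionGraph );
    }
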
@@ -6608,6 +6634,129 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
};
+ //=== VK_KHR_maintenance5 ===
+
+ enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR
+ {
+ eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR,
+ eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR,
+ eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eReserved28NV = VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+ eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR,
+ eDeferCompile = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR,
+ eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR,
+ eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+ eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR,
+ eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR,
+ eLinkTimeOptimization = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR,
+ eRetainLinkTimeOptimizationInfo = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR,
+ eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR,
+ eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+ eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+ eRayTracingNoNullAnyHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullClosestHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+ eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+ eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
+ eIndirectBindable = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR,
+ eRayTracingAllowMotion = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR,
+ eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ eRenderingFragmentDensityMapAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR,
+ eRayTracingOpacityMicromap = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR,
+ eColorAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR,
+ eDepthStencilAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR,
+ eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR,
+ eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR,
+ eDescriptorBuffer = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR
+ };
+
+ using PipelineCreateFlags2KHR = Flags<PipelineCreateFlagBits2KHR>;
+
+ template <>
+ struct FlagTraits<PipelineCreateFlagBits2KHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags =
+ PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ | PipelineCreateFlagBits2KHR::eReserved28NV
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompile |
+ PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations |
+ PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure |
+ PipelineCreateFlagBits2KHR::eLinkTimeOptimization | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo | PipelineCreateFlagBits2KHR::eLibrary |
+ PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs |
+ PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders |
+ PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders |
+ PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindable |
+ PipelineCreateFlagBits2KHR::eRayTracingAllowMotion | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment |
+ PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap |
+ PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop |
+ PipelineCreateFlagBits2KHR::eNoProtectedAccess | PipelineCreateFlagBits2KHR::eProtectedAccessOnly | PipelineCreateFlagBits2KHR::eDescriptorBuffer;
+ };
+
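A sketch of how the 64-bit create flags are meant to be consumed, assuming the 'flags' member name from the 1.3.260 spec; per the spec, a chained PipelineCreateFlags2CreateInfoKHR supersedes the legacy 32-bit flags field:

    #include <vulkan/vulkan.hpp>

    vk::GraphicsPipelineCreateInfo makeCreateInfo( vk::PipelineCreateFlags2CreateInfoKHR & flags2 )
    {
        flags2.flags = vk::PipelineCreateFlagBits2KHR::eDisableOptimization
                     | vk::PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure;

        vk::GraphicsPipelineCreateInfo createInfo;
        createInfo.pNext = &flags2;  // 64-bit flags take precedence over createInfo.flags
        return createInfo;
    }
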
+ enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR
+ {
+ eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR,
+ eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR,
+ eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR,
+ eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR,
+ eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR,
+ eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR,
+ eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR,
+ eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR,
+ eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eConditionalRendering = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR,
+ eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR,
+ eRayTracing = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR,
+ eTransformFeedbackBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR,
+ eTransformFeedbackCounterBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR,
+ eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR,
+ eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR,
+ eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR,
+ eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+ eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+ eSamplerDescriptorBuffer = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR,
+ eResourceDescriptorBuffer = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR,
+ ePushDescriptorsDescriptorBuffer = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR,
+ eMicromapBuildInputReadOnly = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR,
+ eMicromapStorage = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR
+ };
+
+ using BufferUsageFlags2KHR = Flags<BufferUsageFlagBits2KHR>;
+
+ template <>
+ struct FlagTraits<BufferUsageFlagBits2KHR>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2KHR allFlags =
+ BufferUsageFlagBits2KHR::eTransferSrc | BufferUsageFlagBits2KHR::eTransferDst | BufferUsageFlagBits2KHR::eUniformTexelBuffer |
+ BufferUsageFlagBits2KHR::eStorageTexelBuffer | BufferUsageFlagBits2KHR::eUniformBuffer | BufferUsageFlagBits2KHR::eStorageBuffer |
+ BufferUsageFlagBits2KHR::eIndexBuffer | BufferUsageFlagBits2KHR::eVertexBuffer | BufferUsageFlagBits2KHR::eIndirectBuffer
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ | BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | BufferUsageFlagBits2KHR::eConditionalRendering | BufferUsageFlagBits2KHR::eShaderBindingTable | BufferUsageFlagBits2KHR::eTransformFeedbackBuffer |
+ BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer | BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ | BufferUsageFlagBits2KHR::eVideoEncodeDst | BufferUsageFlagBits2KHR::eVideoEncodeSrc
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | BufferUsageFlagBits2KHR::eShaderDeviceAddress | BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly |
+ BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer |
+ BufferUsageFlagBits2KHR::eResourceDescriptorBuffer | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer |
+ BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly | BufferUsageFlagBits2KHR::eMicromapStorage;
+ };
+
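The buffer-usage analogue works the same way; member name 'usage' is assumed from the 1.3.260 spec:

    #include <vulkan/vulkan.hpp>

    vk::Buffer createStorageBuffer( vk::Device device, vk::DeviceSize size )
    {
        // 64-bit usage flags travel in the pNext chain; when present they
        // supersede BufferCreateInfo::usage.
        vk::BufferUsageFlags2CreateInfoKHR usage2;
        usage2.usage = vk::BufferUsageFlagBits2KHR::eStorageBuffer | vk::BufferUsageFlagBits2KHR::eTransferDst;

        vk::BufferCreateInfo createInfo;
        createInfo.pNext = &usage2;
        createInfo.size  = size;
        return device.createBuffer( createInfo );
    }
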
//=== VK_EXT_shader_object ===
enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT
diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp
index 5084b29..548a32f 100644
--- a/include/vulkan/vulkan_extension_inspection.hpp
+++ b/include/vulkan/vulkan_extension_inspection.hpp
@@ -164,7 +164,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
"VK_EXT_sampler_filter_minmax",
"VK_KHR_storage_buffer_storage_class",
-"VK_AMD_gpu_shader_int16",
+"VK_AMD_gpu_shader_int16",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_AMDX_shader_enqueue",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
"VK_AMD_mixed_attachment_samples",
"VK_AMD_shader_fragment_mask",
"VK_EXT_inline_uniform_block",
@@ -379,6 +382,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_NV_optical_flow",
"VK_EXT_legacy_dithering",
"VK_EXT_pipeline_protected_access",
+"VK_KHR_maintenance5",
"VK_KHR_ray_tracing_position_fetch",
"VK_EXT_shader_object",
"VK_QCOM_tile_properties",
@@ -579,7 +583,10 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
{ "VK_ANDROID_external_memory_android_hardware_buffer", { { "VK_VERSION_1_0", { { "VK_KHR_sampler_ycbcr_conversion", "VK_KHR_external_memory", "VK_EXT_queue_family_foreign", "VK_KHR_dedicated_allocation", } } } } },
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-{ "VK_EXT_sampler_filter_minmax", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
+{ "VK_EXT_sampler_filter_minmax", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_AMDX_shader_enqueue", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_synchronization2", "VK_KHR_pipeline_library", "VK_KHR_spirv_1_4", } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
{ "VK_EXT_inline_uniform_block", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_maintenance1", } } } } },
{ "VK_EXT_sample_locations", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_blend_operation_advanced", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
@@ -774,6 +781,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_NV_optical_flow", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_format_feature_flags2", "VK_KHR_synchronization2", } } } } },
{ "VK_EXT_legacy_dithering", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_EXT_pipeline_protected_access", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
+{ "VK_KHR_maintenance5", { { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } } } },
{ "VK_KHR_ray_tracing_position_fetch", { { "VK_VERSION_1_0", { { "VK_KHR_acceleration_structure", } } } } },
{ "VK_EXT_shader_object", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { { } } } } },
{ "VK_QCOM_tile_properties", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
@@ -1386,7 +1394,11 @@ namespace VULKAN_HPP_NAMESPACE
|| ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" )
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|| ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) ||
- ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) ||
+ ( extension == "VK_AMD_gpu_shader_int16" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ || ( extension == "VK_AMDX_shader_enqueue" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) ||
( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) ||
( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) ||
( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) ||
@@ -1493,7 +1505,7 @@ namespace VULKAN_HPP_NAMESPACE
( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) ||
( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) ||
( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) ||
- ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) ||
+ ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || ( extension == "VK_KHR_maintenance5" ) ||
( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_QCOM_tile_properties" ) ||
( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) ||
( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) ||
diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp
index 7d72bbe..16cbabb 100644
--- a/include/vulkan/vulkan_format_traits.hpp
+++ b/include/vulkan/vulkan_format_traits.hpp
@@ -362,6 +362,8 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
@@ -619,6 +621,8 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP";
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP";
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit";
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit";
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha";
default: VULKAN_HPP_ASSERT( false ); return "";
}
@@ -2007,6 +2011,21 @@ namespace VULKAN_HPP_NAMESPACE
case 1: return 16;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
+ switch ( component )
+ {
+ case 0: return 1;
+ case 1: return 5;
+ case 2: return 5;
+ case 3: return 5;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
default: return 0;
}
@@ -2264,6 +2283,8 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
default: return 0;
}
@@ -4284,6 +4305,21 @@ namespace VULKAN_HPP_NAMESPACE
case 1: return "G";
default: VULKAN_HPP_ASSERT( false ); return "";
}
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
+ switch ( component )
+ {
+ case 0: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
default: return "";
}
@@ -6304,6 +6340,21 @@ namespace VULKAN_HPP_NAMESPACE
case 1: return "SINT";
default: VULKAN_HPP_ASSERT( false ); return "";
}
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
default: return "";
}
@@ -6744,6 +6795,7 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 16;
default: return 0;
}
@@ -7605,6 +7657,8 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
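The trait functions patched above are simple table lookups; a sanity-check sketch (blockSize, componentCount, and componentBits are the functions these hunks extend):

    #include <vulkan/vulkan_format_traits.hpp>
    #include <cassert>

    void checkNewFormatTraits()
    {
        // These values mirror the table entries added above.
        assert( vk::blockSize( vk::Format::eA8UnormKHR ) == 1 );
        assert( vk::componentCount( vk::Format::eA1B5G5R5UnormPack16KHR ) == 4 );
        assert( vk::componentBits( vk::Format::eA1B5G5R5UnormPack16KHR, 0 ) == 1 );  // the 1-bit alpha
    }
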
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index f4334ea..1ab8736 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -13584,6 +13584,318 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+ }
+
+ template <typename PipelineAllocator,
+ typename Dispatch,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
+ Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename PipelineAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createExecutionGraphPipelinesAMDXUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+ }
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch,
+ typename PipelineAllocator,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createExecutionGraphPipelinesAMDXUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+ }
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
+ m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type
+ Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
+ VkResult result = d.vkGetExecutionGraphPipelineScratchSizeAMDX(
+ m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sizeInfo );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
+ uint32_t * pNodeIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
+ m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX(
+ VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ uint32_t nodeIndex;
+ VkResult result = d.vkGetExecutionGraphPipelineNodeIndexAMDX(
+ m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), nodeIndex );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdDispatchGraphIndirectAMDX(
+ m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdDispatchGraphIndirectAMDX(
+ m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
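Taken together, the wrappers above support a create, query-scratch, initialize, dispatch flow; a heavily hedged beta sketch, with the 'size' member of ExecutionGraphPipelineScratchSizeAMDX assumed from spec revision 1 and payload setup omitted:

    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.hpp>

    void dispatchGraph( vk::Device device, vk::CommandBuffer cmd,
                        vk::Pipeline executionGraph, vk::DeviceAddress scratchAddress )
    {
        vk::ExecutionGraphPipelineScratchSizeAMDX sizeInfo =
            device.getExecutionGraphPipelineScratchSizeAMDX( executionGraph );
        (void)sizeInfo;  // sizeInfo.size dictates how large the scratch buffer must be

        cmd.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, executionGraph );
        cmd.initializeGraphScratchMemoryAMDX( scratchAddress );

        vk::DispatchGraphCountInfoAMDX countInfo;  // node payloads filled in elsewhere
        cmd.dispatchGraphAMDX( scratchAddress, countInfo );
    }
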
//=== VK_EXT_sample_locations ===
template <typename Dispatch>
@@ -18104,45 +18416,45 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetImageSubresourceLayout2EXT( m_device,
static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
+ reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
- VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
d.vkGetImageSubresourceLayout2EXT( m_device,
static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return layout;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
- VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
d.vkGetImageSubresourceLayout2EXT( m_device,
static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return structureChain;
}
@@ -21935,6 +22247,133 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+ //=== VK_KHR_maintenance5 ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::IndexType indexType,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkIndexType>( indexType ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
+ VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetRenderingAreaGranularityKHR(
+ m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+ Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::Extent2D granularity;
+ d.vkGetRenderingAreaGranularityKHR(
+ m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+ return granularity;
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetDeviceImageSubresourceLayoutKHR(
+ m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+ d.vkGetDeviceImageSubresourceLayoutKHR(
+ m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+ d.vkGetDeviceImageSubresourceLayoutKHR(
+ m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return structureChain;
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetImageSubresourceLayout2KHR( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+ d.vkGetImageSubresourceLayout2KHR( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+ d.vkGetImageSubresourceLayout2KHR( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return structureChain;
+ }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
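A sketch of the enhanced-mode wrappers added above; the pCreateInfo/pSubresource member names of DeviceImageSubresourceInfoKHR are assumed from the 1.3.260 spec:

    #include <vulkan/vulkan.hpp>

    void querySubresourceLayouts( vk::Device device, vk::Image image,
                                  const vk::ImageCreateInfo & imageCreateInfo )
    {
        vk::ImageSubresource2KHR subresource;
        subresource.imageSubresource = vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 );

        // For an existing image:
        vk::SubresourceLayout2KHR layout = device.getImageSubresourceLayout2KHR( image, subresource );

        // maintenance5 also allows querying before the image exists, by
        // describing how it would be created:
        vk::DeviceImageSubresourceInfoKHR info;
        info.pCreateInfo  = &imageCreateInfo;
        info.pSubresource = &subresource;
        layout = device.getImageSubresourceLayoutKHR( info );
        (void)layout;
    }
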
//=== VK_EXT_shader_object ===
template <typename Dispatch>
diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp
index 9c6f0f6..9e7304f 100644
--- a/include/vulkan/vulkan_handles.hpp
+++ b/include/vulkan/vulkan_handles.hpp
@@ -829,6 +829,18 @@ namespace VULKAN_HPP_NAMESPACE
struct AndroidHardwareBufferFormatProperties2ANDROID;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ struct PhysicalDeviceShaderEnqueueFeaturesAMDX;
+ struct PhysicalDeviceShaderEnqueuePropertiesAMDX;
+ struct ExecutionGraphPipelineScratchSizeAMDX;
+ struct ExecutionGraphPipelineCreateInfoAMDX;
+ struct DispatchGraphInfoAMDX;
+ struct DispatchGraphCountInfoAMDX;
+ struct PipelineShaderStageNodeCreateInfoAMDX;
+ union DeviceOrHostAddressConstAMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
struct SampleLocationEXT;
struct SampleLocationsInfoEXT;
@@ -1152,8 +1164,6 @@ namespace VULKAN_HPP_NAMESPACE
struct HostImageLayoutTransitionInfoEXT;
struct SubresourceHostMemcpySizeEXT;
struct HostImageCopyDevicePerformanceQueryEXT;
- struct SubresourceLayout2EXT;
- struct ImageSubresource2EXT;
//=== VK_KHR_map_memory2 ===
struct MemoryMapInfoKHR;
@@ -1598,6 +1608,18 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_pipeline_protected_access ===
struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ //=== VK_KHR_maintenance5 ===
+ struct PhysicalDeviceMaintenance5FeaturesKHR;
+ struct PhysicalDeviceMaintenance5PropertiesKHR;
+ struct RenderingAreaInfoKHR;
+ struct DeviceImageSubresourceInfoKHR;
+ struct ImageSubresource2KHR;
+ using ImageSubresource2EXT = ImageSubresource2KHR;
+ struct SubresourceLayout2KHR;
+ using SubresourceLayout2EXT = SubresourceLayout2KHR;
+ struct PipelineCreateFlags2CreateInfoKHR;
+ struct BufferUsageFlags2CreateInfoKHR;
+
//=== VK_KHR_ray_tracing_position_fetch ===
struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
@@ -5081,6 +5103,41 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
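
The command-buffer declarations above map one-to-one onto the C entry points. A hedged recording sketch with an illustrative helper name; VK_AMDX_shader_enqueue is provisional, so these signatures may still change between header releases:

    #include <vulkan/vulkan.hpp>

    #if defined( VK_ENABLE_BETA_EXTENSIONS )
    // Sketch only: `cmd` is assumed to be recording with an execution graph
    // pipeline bound, and `scratch` to point at memory sized via the
    // scratch-size query on the pipeline.
    void recordGraphDispatch( vk::CommandBuffer cmd,
                              vk::DeviceAddress scratch,
                              vk::DispatchGraphCountInfoAMDX const & countInfo )
    {
      cmd.initializeGraphScratchMemoryAMDX( scratch );  // once per scratch allocation
      cmd.dispatchGraphAMDX( scratch, countInfo );      // count and node infos read from host memory
    }
    #endif /*VK_ENABLE_BETA_EXTENSIONS*/
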
//=== VK_EXT_sample_locations ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -6161,6 +6218,15 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+ //=== VK_KHR_maintenance5 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::IndexType indexType,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
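
A minimal sketch of the new index-buffer bind (helper name illustrative): unlike bindIndexBuffer, the maintenance5 variant takes an explicit size, so the bound range rather than the whole buffer limits index fetches; vk::WholeSize binds to the end of the buffer as before.

    #include <vulkan/vulkan.hpp>

    // Sketch only: `cmd` is assumed to be recording and `indexBuffer` to hold
    // uint32_t indices starting at offset 0.
    void bindIndices( vk::CommandBuffer cmd, vk::Buffer indexBuffer, vk::DeviceSize byteSize )
    {
      cmd.bindIndexBuffer2KHR( indexBuffer, 0, byteSize, vk::IndexType::eUint32 );
    }
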
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -10860,6 +10926,94 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+ createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+ createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
+ createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createExecutionGraphPipelinesAMDXUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createExecutionGraphPipelinesAMDXUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type
+ getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
+ uint32_t * pNodeIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type
+ getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_KHR_get_memory_requirements2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -11944,19 +12098,19 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
@@ -12793,6 +12947,52 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_maintenance5 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
+ VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
+ getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
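
The device-level query declared above is the piece that makes maintenance5 useful for layout planning: it works from a VkImageCreateInfo alone, so no image has to be created first. A sketch with an illustrative helper name:

    #include <vulkan/vulkan.hpp>

    // Sketch only: `device` is assumed valid with VK_KHR_maintenance5 enabled.
    vk::SubresourceLayout2KHR queryLayoutWithoutImage( vk::Device device,
                                                       vk::ImageCreateInfo const & createInfo )
    {
      vk::ImageSubresource2KHR subresource{};
      subresource.imageSubresource = vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 };

      vk::DeviceImageSubresourceInfoKHR info{};
      info.pCreateInfo  = &createInfo;
      info.pSubresource = &subresource;

      return device.getImageSubresourceLayoutKHR( info );
    }
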
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp
index f7305e7..d194824 100644
--- a/include/vulkan/vulkan_hash.hpp
+++ b/include/vulkan/vulkan_hash.hpp
@@ -1922,6 +1922,19 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const & bufferUsageFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.usage );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
@@ -3744,6 +3757,33 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const & imageSubresource2KHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.imageSubresource );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const & deviceImageSubresourceInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pCreateInfo );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pSubresource );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT
@@ -4242,6 +4282,57 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries );
+ return seed;
+ }
+ };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT
@@ -5632,19 +5723,6 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource );
- return seed;
- }
- };
-
- template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
@@ -8977,6 +9055,39 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const & physicalDeviceMaintenance5FeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.maintenance5 );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const & physicalDeviceMaintenance5PropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentMultisampleCoverageAfterSampleCounting );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentSampleMaskTestBeforeSampleCounting );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.depthStencilSwizzleOneSupport );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.polygonModePointSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictSinglePixelWideLinesUseParallelogram );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictWideLinesUseParallelogram );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>
{
std::size_t
@@ -10335,6 +10446,42 @@ namespace std
}
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>
{
@@ -11544,6 +11691,19 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const & pipelineCreateFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.flags );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const VULKAN_HPP_NOEXCEPT
@@ -11742,20 +11902,6 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount );
- VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries );
- return seed;
- }
- };
-
- template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const & pipelinePropertiesIdentifierEXT ) const VULKAN_HPP_NOEXCEPT
@@ -11941,6 +12087,26 @@ namespace std
}
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext );
+          if ( pipelineShaderStageNodeCreateInfoAMDX.pName )  // pName is optional and may be NULL
+          {
+            for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p )
+            {
+              VULKAN_HPP_HASH_COMBINE( seed, *p );
+            }
+          }
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>
{
@@ -12869,6 +13035,23 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const & renderingAreaInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.viewMask );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.colorAttachmentCount );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pColorAttachmentFormats );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.depthAttachmentFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.stencilAttachmentFormat );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & renderingFragmentDensityMapAttachmentInfoEXT ) const
@@ -13579,14 +13762,14 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>
+ struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const & subresourceLayout2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.subresourceLayout );
return seed;
}
};
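
The practical effect of the std::hash specializations added in this file is that the new structs work directly as keys in standard containers (vulkan.hpp structs already provide operator==). A self-contained sketch:

    #include <unordered_set>

    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_hash.hpp>

    int main()
    {
      std::unordered_set<vk::ImageSubresource2KHR> seen;  // uses the specialization above

      vk::ImageSubresource2KHR subresource{};
      subresource.imageSubresource = vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 };

      seen.insert( subresource );
      return seen.count( subresource ) == 1 ? 0 : 1;
    }
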
diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp
index 741f69d..f065519 100644
--- a/include/vulkan/vulkan_raii.hpp
+++ b/include/vulkan/vulkan_raii.hpp
@@ -1068,6 +1068,20 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
+ vkGetExecutionGraphPipelineScratchSizeAMDX =
+ PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
+ vkGetExecutionGraphPipelineNodeIndexAMDX =
+ PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
+ vkCmdInitializeGraphScratchMemoryAMDX =
+ PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
+ vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
+ vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
+ vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
@@ -1337,6 +1351,8 @@ namespace VULKAN_HPP_NAMESPACE
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+ if ( !vkGetImageSubresourceLayout2KHR )
+ vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
@@ -1629,6 +1645,13 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+ //=== VK_KHR_maintenance5 ===
+ vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
+ vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
+ vkGetDeviceImageSubresourceLayoutKHR =
+ PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
+      vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
+      if ( !vkGetImageSubresourceLayout2KHR )
+        vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;  // retain the VK_EXT_host_image_copy fallback when VK_KHR_maintenance5 is absent
+
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
@@ -2030,6 +2053,25 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
+ PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
+ PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
+ PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
+ PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
+ PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
+ PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
+# else
+ PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
+ PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
+ PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
+ PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
+ PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
@@ -2449,6 +2491,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+ //=== VK_KHR_maintenance5 ===
+ PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
+ PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
+ PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
+ PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
+
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
@@ -3836,6 +3884,20 @@ namespace VULKAN_HPP_NAMESPACE
getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createExecutionGraphPipelinesAMDX(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createExecutionGraphPipelineAMDX(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
@@ -4262,6 +4324,18 @@ namespace VULKAN_HPP_NAMESPACE
createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ //=== VK_KHR_maintenance5 ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
+ getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
@@ -5548,6 +5622,21 @@ namespace VULKAN_HPP_NAMESPACE
void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT;
+
+ void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_EXT_sample_locations ===
void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -6010,6 +6099,13 @@ namespace VULKAN_HPP_NAMESPACE
void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_KHR_maintenance5 ===
+
+ void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_EXT_shader_object ===
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
@@ -8152,12 +8248,21 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_host_image_copy ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
- getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_KHR_maintenance5 ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
- getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
+ getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
@@ -8916,6 +9021,30 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ 1,
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
+ if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
+ ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ detail::throwResultException( m_constructorSuccessCode, "vkCreateExecutionGraphPipelinesAMDX" );
+ }
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
@@ -9087,6 +9216,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const;
+
+ VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_KHR_ray_tracing_pipeline ===
template <typename DataType>
@@ -9154,6 +9291,36 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ {
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+ std::vector<VkPipeline> pipelines( createInfos.size() );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateExecutionGraphPipelinesAMDX(
+ static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ createInfos.size(),
+ reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ pipelines.data() ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ this->reserve( createInfos.size() );
+ for ( auto const & pipeline : pipelines )
+ {
+ this->emplace_back( device, pipeline, allocator, result );
+ }
+ }
+ else
+ {
+ detail::throwResultException( result, "vkCreateExecutionGraphPipelinesAMDX" );
+ }
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
@@ -16286,6 +16453,95 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createExecutionGraphPipelinesAMDX(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createExecutionGraphPipelineAMDX(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX &&
+ "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
+ VkResult result = getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" );
+
+ return sizeInfo;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
+ Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX &&
+ "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ uint32_t nodeIndex;
+ VkResult result =
+ getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ),
+ &nodeIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" );
+
+ return nodeIndex;
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX &&
+ "Function <vkCmdInitializeGraphScratchMemoryAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( scratch ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( scratch ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX &&
+ "Function <vkCmdDispatchGraphIndirectCountAMDX> requires <VK_AMDX_shader_enqueue>" );
+
+ getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
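
A hedged sketch of the RAII path implemented above; VK_AMDX_shader_enqueue is provisional and the helper name is illustrative:

    #include <vulkan/vulkan_raii.hpp>

    #if defined( VK_ENABLE_BETA_EXTENSIONS )
    // Sketch only: `device` is a vk::raii::Device created with the extension
    // enabled, and `createInfo` is filled in elsewhere. The pipeline is
    // destroyed automatically at scope exit.
    vk::DeviceSize probeScratchSize( vk::raii::Device const & device,
                                     vk::ExecutionGraphPipelineCreateInfoAMDX const & createInfo )
    {
      vk::raii::Pipeline pipeline = device.createExecutionGraphPipelineAMDX( nullptr, createInfo );
      return pipeline.getExecutionGraphScratchSizeAMDX().size;  // bytes of scratch needed before dispatching
    }
    #endif /*VK_ENABLE_BETA_EXTENSIONS*/
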
//=== VK_EXT_sample_locations ===
VULKAN_HPP_INLINE void
@@ -18236,34 +18492,36 @@ namespace VULKAN_HPP_NAMESPACE
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
- Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
- "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
+ VULKAN_HPP_ASSERT(
+ getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return layout;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
- Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
- "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
+ VULKAN_HPP_ASSERT(
+ getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return structureChain;
}
@@ -20185,6 +20443,99 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
}
+ //=== VK_KHR_maintenance5 ===
+
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function <vkCmdBindIndexBuffer2KHR> requires <VK_KHR_maintenance5>" );
+
+ getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkIndexType>( indexType ) );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+ Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" );
+
+ VULKAN_HPP_NAMESPACE::Extent2D granularity;
+ getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
+ reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+ return granularity;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
+ "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+ getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
+ "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+ getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return structureChain;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+ Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT(
+ getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+ "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+ getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+ Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT(
+ getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+ "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+ VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+ getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+ return structureChain;
+ }
+
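
Of the maintenance5 additions above, getRenderingAreaGranularityKHR is the dynamic-rendering analogue of vkGetRenderAreaGranularity: it needs only the attachment formats, not a render pass object. A sketch with an illustrative helper name:

    #include <vulkan/vulkan_raii.hpp>

    // Sketch only: `device` is assumed to have VK_KHR_maintenance5 enabled.
    vk::Extent2D queryGranularity( vk::raii::Device const & device, vk::Format colorFormat )
    {
      vk::RenderingAreaInfoKHR info{};
      info.colorAttachmentCount    = 1;
      info.pColorAttachmentFormats = &colorFormat;

      return device.getRenderingAreaGranularityKHR( info );
    }
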
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp
index ba79591..1fb34db 100644
--- a/include/vulkan/vulkan_static_assertions.hpp
+++ b/include/vulkan/vulkan_static_assertions.hpp
@@ -3220,6 +3220,63 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPAC
"AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_AMDX_shader_enqueue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueueFeaturesAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
+ "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
+ "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
+ "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
+ "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value,
+ "DispatchGraphInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value,
+ "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
+ "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value,
+ "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
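These assertions are what license the zero-copy reinterpret_cast pattern vulkan.hpp uses throughout: once size, standard layout, and nothrow-move all match, a wrapper object can be handed straight to the C API. A brief sketch of the guarantee they encode (illustrative):

    vk::DispatchGraphCountInfoAMDX countInfo{};
    // safe precisely because of the static assertions above
    const VkDispatchGraphCountInfoAMDX * raw = reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo );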
//=== VK_EXT_sample_locations ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
@@ -4623,17 +4680,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImag
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
"HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" );
-VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ),
- "struct and wrapper have different size!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value,
- "SubresourceLayout2EXT is not nothrow_move_constructible!" );
-
-VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
-VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value,
- "ImageSubresource2EXT is not nothrow_move_constructible!" );
-
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" );
@@ -6567,6 +6613,56 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
"PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
+//=== VK_KHR_maintenance5 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance5FeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
+ "PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
+ "PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value,
+ "RenderingAreaInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value,
+ "DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value,
+ "ImageSubresource2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value,
+ "SubresourceLayout2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value,
+ "PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value,
+ "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" );
+
//=== VK_KHR_ray_tracing_position_fetch ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) ==
diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp
index 97dbee4..8ffe8f2 100644
--- a/include/vulkan/vulkan_structs.hpp
+++ b/include/vulkan/vulkan_structs.hpp
@@ -12512,6 +12512,103 @@ namespace VULKAN_HPP_NAMESPACE
};
using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+ struct BufferUsageFlags2CreateInfoKHR
+ {
+ using NativeType = VkBufferUsageFlags2CreateInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , usage( usage_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferUsageFlags2CreateInfoKHR( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : BufferUsageFlags2CreateInfoKHR( *reinterpret_cast<BufferUsageFlags2CreateInfoKHR const *>( &rhs ) )
+ {
+ }
+
+ BufferUsageFlags2CreateInfoKHR & operator=( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ BufferUsageFlags2CreateInfoKHR & operator=( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usage = usage_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkBufferUsageFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBufferUsageFlags2CreateInfoKHR *>( this );
+ }
+
+ operator VkBufferUsageFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBufferUsageFlags2CreateInfoKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, usage );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( BufferUsageFlags2CreateInfoKHR const & ) const = default;
+#else
+ bool operator==( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
+# endif
+ }
+
+ bool operator!=( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eBufferUsageFlags2CreateInfoKHR>
+ {
+ using Type = BufferUsageFlags2CreateInfoKHR;
+ };
+
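As a usage sketch (assuming a vk::Device `device` with maintenance5 enabled): the struct is chained into BufferCreateInfo, and the legacy 32-bit usage field is then ignored in favor of the 64-bit flags:

    vk::BufferUsageFlags2CreateInfoKHR usage2{ vk::BufferUsageFlagBits2KHR::eVertexBuffer | vk::BufferUsageFlagBits2KHR::eTransferDst };
    vk::BufferCreateInfo bufferCI{};
    bufferCI.setSize( 65536 ).setPNext( &usage2 );  // legacy usage may stay 0 when flags2 are chained
    vk::Buffer buffer = device.createBuffer( bufferCI );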
struct BufferViewCreateInfo
{
using NativeType = VkBufferViewCreateInfo;
@@ -28425,6 +28522,213 @@ namespace VULKAN_HPP_NAMESPACE
};
using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
+ struct ImageSubresource2KHR
+ {
+ using NativeType = VkImageSubresource2KHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageSubresource( imageSubresource_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSubresource2KHR( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageSubresource2KHR( *reinterpret_cast<ImageSubresource2KHR const *>( &rhs ) )
+ {
+ }
+
+ ImageSubresource2KHR & operator=( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageSubresource2KHR & operator=( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageSubresource = imageSubresource_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImageSubresource2KHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageSubresource2KHR *>( this );
+ }
+
+ operator VkImageSubresource2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageSubresource2KHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageSubresource const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, imageSubresource );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageSubresource2KHR const & ) const = default;
+#else
+ bool operator==( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource );
+# endif
+ }
+
+ bool operator!=( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2KHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageSubresource2KHR>
+ {
+ using Type = ImageSubresource2KHR;
+ };
+ using ImageSubresource2EXT = ImageSubresource2KHR;
+
+ struct DeviceImageSubresourceInfoKHR
+ {
+ using NativeType = VkDeviceImageSubresourceInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pCreateInfo( pCreateInfo_ )
+ , pSubresource( pSubresource_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceImageSubresourceInfoKHR( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceImageSubresourceInfoKHR( *reinterpret_cast<DeviceImageSubresourceInfoKHR const *>( &rhs ) )
+ {
+ }
+
+ DeviceImageSubresourceInfoKHR & operator=( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceImageSubresourceInfoKHR & operator=( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCreateInfo = pCreateInfo_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR &
+ setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSubresource = pSubresource_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceImageSubresourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( this );
+ }
+
+ operator VkDeviceImageSubresourceInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceImageSubresourceInfoKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pCreateInfo, pSubresource );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceImageSubresourceInfoKHR const & ) const = default;
+#else
+ bool operator==( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource );
+# endif
+ }
+
+ bool operator!=( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfoKHR;
+ const void * pNext = {};
+ const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceImageSubresourceInfoKHR>
+ {
+ using Type = DeviceImageSubresourceInfoKHR;
+ };
+
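The point of this struct is that maintenance5 can report a subresource layout from a VkImageCreateInfo alone, with no image object ever created. A sketch (assuming a vk::Device `device`):

    vk::ImageCreateInfo imageCI{ /* filled in exactly as if the image were to be created */ };
    vk::ImageSubresource2KHR subresource{ vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } };
    vk::DeviceImageSubresourceInfoKHR info{ &imageCI, &subresource };
    vk::SubresourceLayout2KHR layout = device.getImageSubresourceLayoutKHR( info );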
struct DeviceMemoryOpaqueCaptureAddressInfo
{
using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
@@ -28733,6 +29037,51 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DeviceMemoryReportCallbackDataEXT;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ union DeviceOrHostAddressConstAMDX
+ {
+ using NativeType = VkDeviceOrHostAddressConstAMDX;
+# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
+# endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+# if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceAddress = deviceAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hostAddress = hostAddress_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+ operator VkDeviceOrHostAddressConstAMDX const &() const
+ {
+ return *reinterpret_cast<const VkDeviceOrHostAddressConstAMDX *>( this );
+ }
+
+ operator VkDeviceOrHostAddressConstAMDX &()
+ {
+ return *reinterpret_cast<VkDeviceOrHostAddressConstAMDX *>( this );
+ }
+
+# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+ const void * hostAddress;
+# else
+ VkDeviceAddress deviceAddress;
+ const void * hostAddress;
+# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
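A brief sketch of the union's two personalities; which member the driver reads is decided by the consuming command (host-side vs. device-side dispatch), not by the union itself:

    std::vector<uint32_t> payloads( 64 );
    vk::DeviceOrHostAddressConstAMDX addr;
    addr.setHostAddress( payloads.data() );          // CPU-side payload array
    // or, for device-side dispatch (payloadBufferAddress is assumed to come from vkGetBufferDeviceAddress):
    addr.setDeviceAddress( payloadBufferAddress );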
struct DevicePrivateDataCreateInfo
{
using NativeType = VkDevicePrivateDataCreateInfo;
@@ -29410,6 +29759,175 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct DispatchGraphCountInfoAMDX
+ {
+ using NativeType = VkDispatchGraphCountInfoAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {},
+ uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
+ : count( count_ )
+ , infos( infos_ )
+ , stride( stride_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DispatchGraphCountInfoAMDX( *reinterpret_cast<DispatchGraphCountInfoAMDX const *>( &rhs ) )
+ {
+ }
+
+ DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+ {
+ count = count_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ infos = infos_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stride = stride_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( this );
+ }
+
+ operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDispatchGraphCountInfoAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const &, uint64_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( count, infos, stride );
+ }
+# endif
+
+ public:
+ uint32_t count = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {};
+ uint64_t stride = {};
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct DispatchGraphInfoAMDX
+ {
+ using NativeType = VkDispatchGraphInfoAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {},
+ uint32_t payloadCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {},
+ uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT
+ : nodeIndex( nodeIndex_ )
+ , payloadCount( payloadCount_ )
+ , payloads( payloads_ )
+ , payloadStride( payloadStride_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DispatchGraphInfoAMDX( *reinterpret_cast<DispatchGraphInfoAMDX const *>( &rhs ) )
+ {
+ }
+
+ DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ nodeIndex = nodeIndex_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ payloadCount = payloadCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT
+ {
+ payloads = payloads_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ payloadStride = payloadStride_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDispatchGraphInfoAMDX *>( this );
+ }
+
+ operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDispatchGraphInfoAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const &, uint64_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( nodeIndex, payloadCount, payloads, payloadStride );
+ }
+# endif
+
+ public:
+ uint32_t nodeIndex = {};
+ uint32_t payloadCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {};
+ uint64_t payloadStride = {};
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
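Putting the two AMDX structs together, a hedged sketch of describing one node's payloads for a host-side dispatch (MyPayload and the node index are illustrative; in real code the index would come from vkGetExecutionGraphPipelineNodeIndexAMDX):

    struct MyPayload { uint32_t value; };  // hypothetical payload layout matching the node's shader
    std::array<MyPayload, 4> payloads{};
    uint32_t nodeIndex = 0;                // queried from the execution graph pipeline in real code

    vk::DispatchGraphInfoAMDX node{};
    node.setNodeIndex( nodeIndex )
        .setPayloadCount( static_cast<uint32_t>( payloads.size() ) )
        .setPayloads( vk::DeviceOrHostAddressConstAMDX( payloads.data() ) )
        .setPayloadStride( sizeof( MyPayload ) );

    vk::DispatchGraphCountInfoAMDX countInfo{ 1, vk::DeviceOrHostAddressConstAMDX( &node ), sizeof( node ) };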
struct DispatchIndirectCommand
{
using NativeType = VkDispatchIndirectCommand;
@@ -31909,6 +32427,425 @@ namespace VULKAN_HPP_NAMESPACE
using Type = EventCreateInfo;
};
+ struct PipelineLibraryCreateInfoKHR
+ {
+ using NativeType = VkPipelineLibraryCreateInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , libraryCount( libraryCount_ )
+ , pLibraries( pLibraries_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ libraryCount = libraryCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pLibraries = pLibraries_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PipelineLibraryCreateInfoKHR &
+ setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ libraryCount = static_cast<uint32_t>( libraries_.size() );
+ pLibraries = libraries_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
+ }
+
+ operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, libraryCount, pLibraries );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
+#else
+ bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries );
+# endif
+ }
+
+ bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
+ const void * pNext = {};
+ uint32_t libraryCount = {};
+ const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
+ {
+ using Type = PipelineLibraryCreateInfoKHR;
+ };
+
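With the ArrayProxy constructor above (enhanced mode), libraryCount/pLibraries can be filled from any contiguous range in one go; a sketch assuming two previously built library pipelines:

    std::array<vk::Pipeline, 2> libs = { vertexInputLibrary, fragmentOutputLibrary };  // illustrative handles
    vk::PipelineLibraryCreateInfoKHR libraryInfo( libs );  // libraryCount = 2, pLibraries = libs.data()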
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct ExecutionGraphPipelineCreateInfoAMDX
+ {
+ using NativeType = VkExecutionGraphPipelineCreateInfoAMDX;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , stageCount( stageCount_ )
+ , pStages( pStages_ )
+ , pLibraryInfo( pLibraryInfo_ )
+ , layout( layout_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast<ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ ExecutionGraphPipelineCreateInfoAMDX(
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , stageCount( static_cast<uint32_t>( stages_.size() ) )
+ , pStages( stages_.data() )
+ , pLibraryInfo( pLibraryInfo_ )
+ , layout( layout_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageCount = stageCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+ setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pStages = pStages_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ ExecutionGraphPipelineCreateInfoAMDX &
+ setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageCount = static_cast<uint32_t>( stages_.size() );
+ pStages = stages_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+ setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pLibraryInfo = pLibraryInfo_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ layout = layout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+ setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( this );
+ }
+
+ operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &,
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &,
+ VULKAN_HPP_NAMESPACE::PipelineLayout const &,
+ VULKAN_HPP_NAMESPACE::Pipeline const &,
+ int32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default;
+# else
+ bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+ ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
+ ( basePipelineIndex == rhs.basePipelineIndex );
+# endif
+ }
+
+ bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExecutionGraphPipelineCreateInfoAMDX>
+ {
+ using Type = ExecutionGraphPipelineCreateInfoAMDX;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
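A sketch of assembling the create info in enhanced mode; the shader module and pipeline layout are assumed to exist already:

    vk::PipelineShaderStageCreateInfo nodeStage( {}, vk::ShaderStageFlagBits::eCompute, shaderModule, "main" );
    std::array<vk::PipelineShaderStageCreateInfo, 1> stages = { nodeStage };
    vk::ExecutionGraphPipelineCreateInfoAMDX graphCI( {}, stages, nullptr, pipelineLayout );
    // handed to vkCreateExecutionGraphPipelinesAMDX once cache, allocator etc. are set up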
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct ExecutionGraphPipelineScratchSizeAMDX
+ {
+ using NativeType = VkExecutionGraphPipelineScratchSizeAMDX;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , size( size_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast<ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs ) )
+ {
+ }
+
+ ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExecutionGraphPipelineScratchSizeAMDX *>( this );
+ }
+
+ operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, size );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default;
+# else
+ bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size );
+# endif
+ }
+
+ bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExecutionGraphPipelineScratchSizeAMDX>
+ {
+ using Type = ExecutionGraphPipelineScratchSizeAMDX;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
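This struct is an output: a query fills in how much scratch memory a graph dispatch needs. A hedged sketch using the C entry point from VK_AMDX_shader_enqueue (assumed; it is not shown in this diff):

    VkExecutionGraphPipelineScratchSizeAMDX scratch{ VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX };
    if ( vkGetExecutionGraphPipelineScratchSizeAMDX( device, graphPipeline, &scratch ) == VK_SUCCESS )
    {
      // allocate and bind scratch.size bytes of device memory before dispatching the graph
    }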
struct ExportFenceCreateInfo
{
using NativeType = VkExportFenceCreateInfo;
@@ -39119,7 +40056,7 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {},
- void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, flags( flags_ )
{
@@ -39142,7 +40079,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -39169,7 +40106,7 @@ namespace VULKAN_HPP_NAMESPACE
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -39197,7 +40134,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
- void * pNext = {};
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {};
};
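The switch from void * to const void * above is caller-visible: this input-only struct's own pNext link can now point at const structures without a const_cast. A minimal sketch:

    const vk::PipelineCreateFlags2CreateInfoKHR flags2{ vk::PipelineCreateFlagBits2KHR::eDisableOptimization };
    vk::GraphicsPipelineLibraryCreateInfoEXT libFlags{ vk::GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface };
    libFlags.setPNext( &flags2 );  // a pointer-to-const link is now accepted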
@@ -42719,102 +43656,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
- struct ImageSubresource2EXT
- {
- using NativeType = VkImageSubresource2EXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2EXT;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pNext( pNext_ )
- , imageSubresource( imageSubresource_ )
- {
- }
-
- VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageSubresource2EXT( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageSubresource2EXT( *reinterpret_cast<ImageSubresource2EXT const *>( &rhs ) )
- {
- }
-
- ImageSubresource2EXT & operator=( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- ImageSubresource2EXT & operator=( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const *>( &rhs );
- return *this;
- }
-
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- imageSubresource = imageSubresource_;
- return *this;
- }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- operator VkImageSubresource2EXT const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageSubresource2EXT *>( this );
- }
-
- operator VkImageSubresource2EXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageSubresource2EXT *>( this );
- }
-
-#if defined( VULKAN_HPP_USE_REFLECT )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageSubresource const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, imageSubresource );
- }
-#endif
-
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( ImageSubresource2EXT const & ) const = default;
-#else
- bool operator==( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if defined( VULKAN_HPP_USE_REFLECT )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource );
-# endif
- }
-
- bool operator!=( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2EXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {};
- };
-
- template <>
- struct CppType<StructureType, StructureType::eImageSubresource2EXT>
- {
- using Type = ImageSubresource2EXT;
- };
-
struct ImageSwapchainCreateInfoKHR
{
using NativeType = VkImageSwapchainCreateInfoKHR;
@@ -66640,6 +67481,219 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
+ struct PhysicalDeviceMaintenance5FeaturesKHR
+ {
+ using NativeType = VkPhysicalDeviceMaintenance5FeaturesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maintenance5( maintenance5_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMaintenance5FeaturesKHR( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMaintenance5FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance5FeaturesKHR const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceMaintenance5FeaturesKHR & operator=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceMaintenance5FeaturesKHR & operator=( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maintenance5 = maintenance5_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceMaintenance5FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMaintenance5FeaturesKHR *>( this );
+ }
+
+ operator VkPhysicalDeviceMaintenance5FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMaintenance5FeaturesKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, maintenance5 );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceMaintenance5FeaturesKHR const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5FeaturesKHR>
+ {
+ using Type = PhysicalDeviceMaintenance5FeaturesKHR;
+ };
+
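A usage sketch of the standard features2 query pattern for the new struct (assuming a vk::PhysicalDevice `physicalDevice`):

    auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceMaintenance5FeaturesKHR>();
    bool hasMaintenance5 = features.get<vk::PhysicalDeviceMaintenance5FeaturesKHR>().maintenance5;
    // if supported, chain the same struct into vk::DeviceCreateInfo::pNext to enable it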
+ struct PhysicalDeviceMaintenance5PropertiesKHR
+ {
+ using NativeType = VkPhysicalDeviceMaintenance5PropertiesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , earlyFragmentMultisampleCoverageAfterSampleCounting( earlyFragmentMultisampleCoverageAfterSampleCounting_ )
+ , earlyFragmentSampleMaskTestBeforeSampleCounting( earlyFragmentSampleMaskTestBeforeSampleCounting_ )
+ , depthStencilSwizzleOneSupport( depthStencilSwizzleOneSupport_ )
+ , polygonModePointSize( polygonModePointSize_ )
+ , nonStrictSinglePixelWideLinesUseParallelogram( nonStrictSinglePixelWideLinesUseParallelogram_ )
+ , nonStrictWideLinesUseParallelogram( nonStrictWideLinesUseParallelogram_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMaintenance5PropertiesKHR( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMaintenance5PropertiesKHR( *reinterpret_cast<PhysicalDeviceMaintenance5PropertiesKHR const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceMaintenance5PropertiesKHR & operator=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceMaintenance5PropertiesKHR & operator=( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceMaintenance5PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMaintenance5PropertiesKHR *>( this );
+ }
+
+ operator VkPhysicalDeviceMaintenance5PropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMaintenance5PropertiesKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ earlyFragmentMultisampleCoverageAfterSampleCounting,
+ earlyFragmentSampleMaskTestBeforeSampleCounting,
+ depthStencilSwizzleOneSupport,
+ polygonModePointSize,
+ nonStrictSinglePixelWideLinesUseParallelogram,
+ nonStrictWideLinesUseParallelogram );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceMaintenance5PropertiesKHR const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+ ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) &&
+ ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) &&
+ ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) &&
+ ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) &&
+ ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {};
+ VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {};
+ VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {};
+ VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5PropertiesKHR>
+ {
+ using Type = PhysicalDeviceMaintenance5PropertiesKHR;
+ };
+
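[Editor's note: for context, a minimal sketch (not part of this commit) of how the new maintenance5 properties are typically read, assuming a valid vk::PhysicalDevice whose implementation supports VK_KHR_maintenance5 and the default `vk` namespace:

  #include <vulkan/vulkan.hpp>

  // Chain the maintenance5 struct into the standard properties2 query;
  // getProperties2 fills every structure linked on the pNext chain.
  bool wideLinesUseParallelogram( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceMaintenance5PropertiesKHR maintenance5Properties{};
    vk::PhysicalDeviceProperties2               properties2{};
    properties2.pNext = &maintenance5Properties;
    physicalDevice.getProperties2( &properties2 );
    return maintenance5Properties.nonStrictWideLinesUseParallelogram == VK_TRUE;
  }
]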
struct PhysicalDeviceMemoryBudgetPropertiesEXT
{
using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;
@@ -75719,6 +76773,260 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct PhysicalDeviceShaderEnqueueFeaturesAMDX
+ {
+ using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderEnqueue( shaderEnqueue_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderEnqueue = shaderEnqueue_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderEnqueue );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default;
+# else
+ bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX>
+ {
+ using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
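[Editor's note: a hedged sketch of the usual query-then-enable pattern for this feature struct (requires VK_ENABLE_BETA_EXTENSIONS to be defined; `physicalDevice` is an assumed, valid handle):

  // Query the feature, then hand the same chain back at device creation;
  // whatever getFeatures2 reported as supported is thereby enabled.
  vk::PhysicalDeviceShaderEnqueueFeaturesAMDX enqueueFeatures{};
  vk::PhysicalDeviceFeatures2                 features2{};
  features2.pNext = &enqueueFeatures;
  physicalDevice.getFeatures2( &features2 );

  vk::DeviceCreateInfo deviceCreateInfo{};
  deviceCreateInfo.pNext = &features2;  // enables shaderEnqueue if supported
  // note: leave deviceCreateInfo.pEnabledFeatures null when chaining features2
]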
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct PhysicalDeviceShaderEnqueuePropertiesAMDX
+ {
+ using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {},
+ uint32_t maxExecutionGraphShaderOutputNodes_ = {},
+ uint32_t maxExecutionGraphShaderPayloadSize_ = {},
+ uint32_t maxExecutionGraphShaderPayloadCount_ = {},
+ uint32_t executionGraphDispatchAddressAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxExecutionGraphDepth( maxExecutionGraphDepth_ )
+ , maxExecutionGraphShaderOutputNodes( maxExecutionGraphShaderOutputNodes_ )
+ , maxExecutionGraphShaderPayloadSize( maxExecutionGraphShaderPayloadSize_ )
+ , maxExecutionGraphShaderPayloadCount( maxExecutionGraphShaderPayloadCount_ )
+ , executionGraphDispatchAddressAlignment( executionGraphDispatchAddressAlignment_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphDepth( uint32_t maxExecutionGraphDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxExecutionGraphDepth = maxExecutionGraphDepth_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX &
+ setMaxExecutionGraphShaderOutputNodes( uint32_t maxExecutionGraphShaderOutputNodes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX &
+ setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxExecutionGraphShaderPayloadSize = maxExecutionGraphShaderPayloadSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX &
+ setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxExecutionGraphShaderPayloadCount = maxExecutionGraphShaderPayloadCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX &
+ setExecutionGraphDispatchAddressAlignment( uint32_t executionGraphDispatchAddressAlignment_ ) VULKAN_HPP_NOEXCEPT
+ {
+ executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ maxExecutionGraphDepth,
+ maxExecutionGraphShaderOutputNodes,
+ maxExecutionGraphShaderPayloadSize,
+ maxExecutionGraphShaderPayloadCount,
+ executionGraphDispatchAddressAlignment );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default;
+# else
+ bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) &&
+ ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) &&
+ ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) &&
+ ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) &&
+ ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;
+ void * pNext = {};
+ uint32_t maxExecutionGraphDepth = {};
+ uint32_t maxExecutionGraphShaderOutputNodes = {};
+ uint32_t maxExecutionGraphShaderPayloadSize = {};
+ uint32_t maxExecutionGraphShaderPayloadCount = {};
+ uint32_t executionGraphDispatchAddressAlignment = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX>
+ {
+ using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
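[Editor's note: a companion sketch for the limits above, e.g. validating a node payload size before building an execution graph (same assumed handle and beta-extension support as before):

  // Returns whether a payload of `payloadBytes` stays within the
  // device's execution-graph limit, queried via the properties2 chain.
  bool payloadFits( vk::PhysicalDevice physicalDevice, uint32_t payloadBytes )
  {
    vk::PhysicalDeviceShaderEnqueuePropertiesAMDX enqueueProperties{};
    vk::PhysicalDeviceProperties2                 properties2{};
    properties2.pNext = &enqueueProperties;
    physicalDevice.getProperties2( &properties2 );
    return payloadBytes <= enqueueProperties.maxExecutionGraphShaderPayloadSize;
  }
]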
struct PhysicalDeviceShaderFloat16Int8Features
{
using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;
@@ -84244,6 +85552,103 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineCoverageToColorStateCreateInfoNV;
};
+ struct PipelineCreateFlags2CreateInfoKHR
+ {
+ using NativeType = VkPipelineCreateFlags2CreateInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineCreateFlags2CreateInfoKHR( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineCreateFlags2CreateInfoKHR( *reinterpret_cast<PipelineCreateFlags2CreateInfoKHR const *>( &rhs ) )
+ {
+ }
+
+ PipelineCreateFlags2CreateInfoKHR & operator=( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineCreateFlags2CreateInfoKHR & operator=( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineCreateFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineCreateFlags2CreateInfoKHR *>( this );
+ }
+
+ operator VkPipelineCreateFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineCreateFlags2CreateInfoKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineCreateFlags2CreateInfoKHR const & ) const = default;
+#else
+ bool operator==( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+ }
+
+ bool operator!=( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineCreateFlags2CreateInfoKHR>
+ {
+ using Type = PipelineCreateFlags2CreateInfoKHR;
+ };
+
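[Editor's note: a short sketch of intended usage — the 64-bit flags ride on the pNext chain of the existing create info, where, per VK_KHR_maintenance5, they are used in place of the legacy 32-bit `flags` field:

  auto flags2Info = vk::PipelineCreateFlags2CreateInfoKHR{}.setFlags(
    vk::PipelineCreateFlagBits2KHR::eDisableOptimization | vk::PipelineCreateFlagBits2KHR::eAllowDerivatives );

  vk::GraphicsPipelineCreateInfo pipelineCreateInfo{};
  pipelineCreateInfo.pNext = &flags2Info;  // legacy `flags` is ignored when this struct is chained
]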
struct PipelineCreationFeedback
{
using NativeType = VkPipelineCreationFeedback;
@@ -85801,130 +87206,6 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineLayoutCreateInfo;
};
- struct PipelineLibraryCreateInfoKHR
- {
- using NativeType = VkPipelineLibraryCreateInfoKHR;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {},
- const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {},
- const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pNext( pNext_ )
- , libraryCount( libraryCount_ )
- , pLibraries( pLibraries_ )
- {
- }
-
- VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
- {
- }
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_,
- const void * pNext_ = nullptr )
- : pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
- {
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
- return *this;
- }
-
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- libraryCount = libraryCount_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
- {
- pLibraries = pLibraries_;
- return *this;
- }
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLibraryCreateInfoKHR &
- setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
- {
- libraryCount = static_cast<uint32_t>( libraries_.size() );
- pLibraries = libraries_.data();
- return *this;
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
- }
-
- operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
- }
-
-#if defined( VULKAN_HPP_USE_REFLECT )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, libraryCount, pLibraries );
- }
-#endif
-
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
-#else
- bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if defined( VULKAN_HPP_USE_REFLECT )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries );
-# endif
- }
-
- bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
- const void * pNext = {};
- uint32_t libraryCount = {};
- const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
- };
-
- template <>
- struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
- {
- using Type = PipelineLibraryCreateInfoKHR;
- };
-
struct PipelinePropertiesIdentifierEXT
{
using NativeType = VkPipelinePropertiesIdentifierEXT;
@@ -87349,6 +88630,122 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct PipelineShaderStageNodeCreateInfoAMDX
+ {
+ using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pName( pName_ )
+ , index( index_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast<PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs ) )
+ {
+ }
+
+ PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pName = pName_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
+ {
+ index = index_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
+ }
+
+ operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pName, index );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+ return cmp;
+ if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+ return cmp;
+ if ( pName != rhs.pName )
+ if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
+ return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+ if ( auto cmp = index <=> rhs.index; cmp != 0 )
+ return cmp;
+
+ return std::strong_ordering::equivalent;
+ }
+# endif
+
+ bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( index == rhs.index );
+ }
+
+ bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;
+ const void * pNext = {};
+ const char * pName = {};
+ uint32_t index = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineShaderStageNodeCreateInfoAMDX>
+ {
+ using Type = PipelineShaderStageNodeCreateInfoAMDX;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
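[Editor's note: a hedged sketch of typical usage (beta extension; chaining onto PipelineShaderStageCreateInfo is assumed from the extension's design). Note that, unlike most structs in this header, the hand-written comparison operators above compare pName with strcmp, so identical node names at different addresses still compare equal:

  auto nodeInfo = vk::PipelineShaderStageNodeCreateInfoAMDX{}
                    .setPName( "main" )  // node name within the execution graph
                    .setIndex( 0 );

  vk::PipelineShaderStageCreateInfo stageInfo{};
  stageInfo.pNext = &nodeInfo;
]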
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
{
using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
@@ -95155,6 +96552,174 @@ namespace VULKAN_HPP_NAMESPACE
using Type = RenderPassTransformBeginInfoQCOM;
};
+ struct RenderingAreaInfoKHR
+ {
+ using NativeType = VkRenderingAreaInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( uint32_t viewMask_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , viewMask( viewMask_ )
+ , colorAttachmentCount( colorAttachmentCount_ )
+ , pColorAttachmentFormats( pColorAttachmentFormats_ )
+ , depthAttachmentFormat( depthAttachmentFormat_ )
+ , stencilAttachmentFormat( stencilAttachmentFormat_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderingAreaInfoKHR( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderingAreaInfoKHR( *reinterpret_cast<RenderingAreaInfoKHR const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ RenderingAreaInfoKHR( uint32_t viewMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , viewMask( viewMask_ )
+ , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
+ , pColorAttachmentFormats( colorAttachmentFormats_.data() )
+ , depthAttachmentFormat( depthAttachmentFormat_ )
+ , stencilAttachmentFormat( stencilAttachmentFormat_ )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ RenderingAreaInfoKHR & operator=( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderingAreaInfoKHR & operator=( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ viewMask = viewMask_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = colorAttachmentCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR &
+ setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pColorAttachmentFormats = pColorAttachmentFormats_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ RenderingAreaInfoKHR & setColorAttachmentFormats(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
+ pColorAttachmentFormats = colorAttachmentFormats_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ depthAttachmentFormat = depthAttachmentFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stencilAttachmentFormat = stencilAttachmentFormat_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkRenderingAreaInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderingAreaInfoKHR *>( this );
+ }
+
+ operator VkRenderingAreaInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderingAreaInfoKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::Format * const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::Format const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderingAreaInfoKHR const & ) const = default;
+#else
+ bool operator==( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
+ ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
+ ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
+# endif
+ }
+
+ bool operator!=( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfoKHR;
+ const void * pNext = {};
+ uint32_t viewMask = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eRenderingAreaInfoKHR>
+ {
+ using Type = RenderingAreaInfoKHR;
+ };
+
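[Editor's note: a sketch of the enhanced-mode ArrayProxy constructor above, paired with the maintenance5 query this struct was added for; the getRenderingAreaGranularityKHR wrapper is assumed from the corresponding vulkan_funcs.hpp additions, and `device` is a valid vk::Device:

  std::array<vk::Format, 1> colorFormats = { vk::Format::eB8G8R8A8Unorm };
  vk::RenderingAreaInfoKHR  areaInfo( 0 /*viewMask*/, colorFormats, vk::Format::eD32Sfloat );
  vk::Extent2D granularity = device.getRenderingAreaGranularityKHR( areaInfo );
]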
struct RenderingAttachmentInfo
{
using NativeType = VkRenderingAttachmentInfo;
@@ -100877,44 +102442,44 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SubresourceHostMemcpySizeEXT;
};
- struct SubresourceLayout2EXT
+ struct SubresourceLayout2KHR
{
- using NativeType = VkSubresourceLayout2EXT;
+ using NativeType = VkSubresourceLayout2KHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2EXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, subresourceLayout( subresourceLayout_ )
{
}
- VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubresourceLayout2EXT( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubresourceLayout2EXT( *reinterpret_cast<SubresourceLayout2EXT const *>( &rhs ) )
+ SubresourceLayout2KHR( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubresourceLayout2KHR( *reinterpret_cast<SubresourceLayout2KHR const *>( &rhs ) )
{
}
- SubresourceLayout2EXT & operator=( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SubresourceLayout2KHR & operator=( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SubresourceLayout2EXT & operator=( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubresourceLayout2KHR & operator=( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const *>( &rhs );
return *this;
}
- operator VkSubresourceLayout2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubresourceLayout2KHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubresourceLayout2EXT *>( this );
+ return *reinterpret_cast<const VkSubresourceLayout2KHR *>( this );
}
- operator VkSubresourceLayout2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSubresourceLayout2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubresourceLayout2EXT *>( this );
+ return *reinterpret_cast<VkSubresourceLayout2KHR *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
@@ -100930,9 +102495,9 @@ namespace VULKAN_HPP_NAMESPACE
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( SubresourceLayout2EXT const & ) const = default;
+ auto operator<=>( SubresourceLayout2KHR const & ) const = default;
#else
- bool operator==( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
@@ -100941,23 +102506,24 @@ namespace VULKAN_HPP_NAMESPACE
# endif
}
- bool operator!=( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2EXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {};
};
template <>
- struct CppType<StructureType, StructureType::eSubresourceLayout2EXT>
+ struct CppType<StructureType, StructureType::eSubresourceLayout2KHR>
{
- using Type = SubresourceLayout2EXT;
+ using Type = SubresourceLayout2KHR;
};
+ using SubresourceLayout2EXT = SubresourceLayout2KHR;
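[Editor's note: the trailing alias keeps existing VK_EXT_image_compression_control code compiling unchanged after the promotion to KHR — both spellings now name the same type (C++17, <type_traits>):

  static_assert( std::is_same_v<vk::SubresourceLayout2EXT, vk::SubresourceLayout2KHR>,
                 "the EXT name is an alias of the promoted KHR struct" );
]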
struct SurfaceCapabilities2EXT
{
diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp
index 7e3a8f5..d04d891 100644
--- a/include/vulkan/vulkan_to_string.hpp
+++ b/include/vulkan/vulkan_to_string.hpp
@@ -600,6 +600,10 @@ namespace VULKAN_HPP_NAMESPACE
result += "TransformFeedbackCounterBufferEXT | ";
if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
result += "ConditionalRenderingEXT | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX )
+ result += "ExecutionGraphScratchAMDX | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
result += "AccelerationStructureBuildInputReadOnlyKHR | ";
if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
@@ -3303,6 +3307,148 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
+ //=== VK_KHR_maintenance5 ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2KHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineCreateFlagBits2KHR::eDisableOptimization )
+ result += "DisableOptimization | ";
+ if ( value & PipelineCreateFlagBits2KHR::eAllowDerivatives )
+ result += "AllowDerivatives | ";
+ if ( value & PipelineCreateFlagBits2KHR::eDerivative )
+ result += "Derivative | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & PipelineCreateFlagBits2KHR::eReserved28NV )
+ result += "Reserved28NV | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex )
+ result += "ViewIndexFromDeviceIndex | ";
+ if ( value & PipelineCreateFlagBits2KHR::eDispatchBase )
+ result += "DispatchBase | ";
+ if ( value & PipelineCreateFlagBits2KHR::eDeferCompile )
+ result += "DeferCompile | ";
+ if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics )
+ result += "CaptureStatistics | ";
+ if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations )
+ result += "CaptureInternalRepresentations | ";
+ if ( value & PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired )
+ result += "FailOnPipelineCompileRequired | ";
+ if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure )
+ result += "EarlyReturnOnFailure | ";
+ if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimization )
+ result += "LinkTimeOptimization | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo )
+ result += "RetainLinkTimeOptimizationInfo | ";
+ if ( value & PipelineCreateFlagBits2KHR::eLibrary )
+ result += "Library | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles )
+ result += "RayTracingSkipTriangles | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs )
+ result += "RayTracingSkipAabbs | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders )
+ result += "RayTracingNoNullAnyHitShaders | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders )
+ result += "RayTracingNoNullClosestHitShaders | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders )
+ result += "RayTracingNoNullMissShaders | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders )
+ result += "RayTracingNoNullIntersectionShaders | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay )
+ result += "RayTracingShaderGroupHandleCaptureReplay | ";
+ if ( value & PipelineCreateFlagBits2KHR::eIndirectBindable )
+ result += "IndirectBindable | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotion )
+ result += "RayTracingAllowMotion | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment )
+ result += "RenderingFragmentShadingRateAttachment | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment )
+ result += "RenderingFragmentDensityMapAttachment | ";
+ if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap )
+ result += "RayTracingOpacityMicromap | ";
+ if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop )
+ result += "ColorAttachmentFeedbackLoop | ";
+ if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop )
+ result += "DepthStencilAttachmentFeedbackLoop | ";
+ if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccess )
+ result += "NoProtectedAccess | ";
+ if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnly )
+ result += "ProtectedAccessOnly | ";
+ if ( value & PipelineCreateFlagBits2KHR::eDescriptorBuffer )
+ result += "DescriptorBuffer | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2KHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BufferUsageFlagBits2KHR::eTransferSrc )
+ result += "TransferSrc | ";
+ if ( value & BufferUsageFlagBits2KHR::eTransferDst )
+ result += "TransferDst | ";
+ if ( value & BufferUsageFlagBits2KHR::eUniformTexelBuffer )
+ result += "UniformTexelBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eStorageTexelBuffer )
+ result += "StorageTexelBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eUniformBuffer )
+ result += "UniformBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eStorageBuffer )
+ result += "StorageBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eIndexBuffer )
+ result += "IndexBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eVertexBuffer )
+ result += "VertexBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eIndirectBuffer )
+ result += "IndirectBuffer | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX )
+ result += "ExecutionGraphScratchAMDX | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & BufferUsageFlagBits2KHR::eConditionalRendering )
+ result += "ConditionalRendering | ";
+ if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable )
+ result += "ShaderBindingTable | ";
+ if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBuffer )
+ result += "TransformFeedbackBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer )
+ result += "TransformFeedbackCounterBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc )
+ result += "VideoDecodeSrc | ";
+ if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst )
+ result += "VideoDecodeDst | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & BufferUsageFlagBits2KHR::eVideoEncodeDst )
+ result += "VideoEncodeDst | ";
+ if ( value & BufferUsageFlagBits2KHR::eVideoEncodeSrc )
+ result += "VideoEncodeSrc | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & BufferUsageFlagBits2KHR::eShaderDeviceAddress )
+ result += "ShaderDeviceAddress | ";
+ if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly )
+ result += "AccelerationStructureBuildInputReadOnly | ";
+ if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureStorage )
+ result += "AccelerationStructureStorage | ";
+ if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer )
+ result += "SamplerDescriptorBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBuffer )
+ result += "ResourceDescriptorBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer )
+ result += "PushDescriptorsDescriptorBuffer | ";
+ if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly )
+ result += "MicromapBuildInputReadOnly | ";
+ if ( value & BufferUsageFlagBits2KHR::eMicromapStorage )
+ result += "MicromapStorage | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
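[Editor's note: for illustration, a small sketch of the new flag formatters (assumes <iostream> and the default `vk` namespace); multi-bit masks decompose into the " | "-separated names built above:

  vk::BufferUsageFlags2KHR usage =
    vk::BufferUsageFlagBits2KHR::eVertexBuffer | vk::BufferUsageFlagBits2KHR::eShaderDeviceAddress;
  std::cout << vk::to_string( usage ) << '\n';  // prints: { VertexBuffer | ShaderDeviceAddress }
]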
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value )
@@ -3830,6 +3976,13 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX";
+ case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX";
+ case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX";
+ case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX";
+ case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT";
@@ -4109,8 +4262,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT";
case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT";
- case StructureType::eSubresourceLayout2EXT: return "SubresourceLayout2EXT";
- case StructureType::eImageSubresource2EXT: return "ImageSubresource2EXT";
case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT";
case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
@@ -4239,6 +4390,14 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
+ case StructureType::ePhysicalDeviceMaintenance5FeaturesKHR: return "PhysicalDeviceMaintenance5FeaturesKHR";
+ case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: return "PhysicalDeviceMaintenance5PropertiesKHR";
+ case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR";
+ case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR";
+ case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR";
+ case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR";
+ case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR";
+ case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR";
case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR";
case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT";
@@ -4608,6 +4767,8 @@ namespace VULKAN_HPP_NAMESPACE
case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
case Format::eR16G16S105NV: return "R16G16S105NV";
+ case Format::eA1B5G5R5UnormPack16KHR: return "A1B5G5R5UnormPack16KHR";
+ case Format::eA8UnormKHR: return "A8UnormKHR";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5046,6 +5207,9 @@ namespace VULKAN_HPP_NAMESPACE
case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR";
case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
@@ -5784,6 +5948,9 @@ namespace VULKAN_HPP_NAMESPACE
{
case PipelineBindPoint::eGraphics: return "Graphics";
case PipelineBindPoint::eCompute: return "Compute";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR";
case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
@@ -8516,6 +8683,87 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ //=== VK_KHR_maintenance5 ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2KHR value )
+ {
+ switch ( value )
+ {
+ case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization";
+ case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives";
+ case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case PipelineCreateFlagBits2KHR::eReserved28NV: return "Reserved28NV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
+ case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase";
+ case PipelineCreateFlagBits2KHR::eDeferCompile: return "DeferCompile";
+ case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics";
+ case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations";
+ case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired";
+ case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure";
+ case PipelineCreateFlagBits2KHR::eLinkTimeOptimization: return "LinkTimeOptimization";
+ case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo: return "RetainLinkTimeOptimizationInfo";
+ case PipelineCreateFlagBits2KHR::eLibrary: return "Library";
+ case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles";
+ case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs";
+ case PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders: return "RayTracingNoNullAnyHitShaders";
+ case PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders: return "RayTracingNoNullClosestHitShaders";
+ case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders";
+ case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders";
+ case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay";
+ case PipelineCreateFlagBits2KHR::eIndirectBindable: return "IndirectBindable";
+ case PipelineCreateFlagBits2KHR::eRayTracingAllowMotion: return "RayTracingAllowMotion";
+ case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment";
+ case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment: return "RenderingFragmentDensityMapAttachment";
+ case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap: return "RayTracingOpacityMicromap";
+ case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop: return "ColorAttachmentFeedbackLoop";
+ case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop: return "DepthStencilAttachmentFeedbackLoop";
+ case PipelineCreateFlagBits2KHR::eNoProtectedAccess: return "NoProtectedAccess";
+ case PipelineCreateFlagBits2KHR::eProtectedAccessOnly: return "ProtectedAccessOnly";
+ case PipelineCreateFlagBits2KHR::eDescriptorBuffer: return "DescriptorBuffer";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2KHR value )
+ {
+ switch ( value )
+ {
+ case BufferUsageFlagBits2KHR::eTransferSrc: return "TransferSrc";
+ case BufferUsageFlagBits2KHR::eTransferDst: return "TransferDst";
+ case BufferUsageFlagBits2KHR::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case BufferUsageFlagBits2KHR::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case BufferUsageFlagBits2KHR::eUniformBuffer: return "UniformBuffer";
+ case BufferUsageFlagBits2KHR::eStorageBuffer: return "StorageBuffer";
+ case BufferUsageFlagBits2KHR::eIndexBuffer: return "IndexBuffer";
+ case BufferUsageFlagBits2KHR::eVertexBuffer: return "VertexBuffer";
+ case BufferUsageFlagBits2KHR::eIndirectBuffer: return "IndirectBuffer";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case BufferUsageFlagBits2KHR::eConditionalRendering: return "ConditionalRendering";
+ case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable";
+ case BufferUsageFlagBits2KHR::eTransformFeedbackBuffer: return "TransformFeedbackBuffer";
+ case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer: return "TransformFeedbackCounterBuffer";
+ case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc";
+ case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case BufferUsageFlagBits2KHR::eVideoEncodeDst: return "VideoEncodeDst";
+ case BufferUsageFlagBits2KHR::eVideoEncodeSrc: return "VideoEncodeSrc";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress";
+ case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly";
+ case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage";
+ case BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer: return "SamplerDescriptorBuffer";
+ case BufferUsageFlagBits2KHR::eResourceDescriptorBuffer: return "ResourceDescriptorBuffer";
+ case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer: return "PushDescriptorsDescriptorBuffer";
+ case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly: return "MicromapBuildInputReadOnly";
+ case BufferUsageFlagBits2KHR::eMicromapStorage: return "MicromapStorage";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
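[Editor's note: the single-bit overloads, by contrast, return just the bare enumerant name, and any unrecognized value falls through to the hex "invalid ( ... )" default shown above:

  std::cout << vk::to_string( vk::PipelineCreateFlagBits2KHR::eLibrary ) << '\n';  // prints: Library
]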
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value )