author:    Jon Leech <[email protected]>  2024-10-11 01:34:23 -0700
committer: Jon Leech <[email protected]>  2024-10-11 01:41:38 -0700
commit:    d91597a82f881d473887b560a03a7edf2720b72c
tree:      5c04a18d1e7f1251e04548051d6438189eb8c88c
parent:    14345dab231912ee9601136e96ca67a6e1f632e7
Update for Vulkan-Docs 1.3.298 (tag: v1.3.298)
 include/vulkan/vulkan.hpp                      |   22
 include/vulkan/vulkan_beta.h                   |   28
 include/vulkan/vulkan_core.h                   |    9
 include/vulkan/vulkan_enums.hpp                |   16
 include/vulkan/vulkan_extension_inspection.hpp |    8
 include/vulkan/vulkan_funcs.hpp                | 2153
 include/vulkan/vulkan_handles.hpp              |    9
 include/vulkan/vulkan_hash.hpp                 |   10
 include/vulkan/vulkan_raii.hpp                 |   32
 include/vulkan/vulkan_structs.hpp              |  135
 include/vulkan/vulkan_to_string.hpp            |    7
 registry/validusage.json                       |  156
 registry/vk.xml                                |   67
 13 files changed, 1611 insertions(+), 1041 deletions(-)
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 85a35f6..91a0724 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
 #  include <span>
 #endif
 
-static_assert( VK_HEADER_VERSION == 297, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 298, "Wrong VK_HEADER_VERSION!" );
 
 // <tuple> includes <sys/sysmacros.h> through some other header
 // this results in major(x) being resolved to gnu_dev_major(x)
@@ -3632,28 +3632,36 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex );
     }
 
-    void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer,
+                                                VkPipeline      executionGraph,
+                                                VkDeviceAddress scratch,
+                                                VkDeviceSize    scratchSize ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch );
+      return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize );
     }
 
     void vkCmdDispatchGraphAMDX( VkCommandBuffer                      commandBuffer,
                                  VkDeviceAddress                      scratch,
+                                 VkDeviceSize                         scratchSize,
                                  const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo );
+      return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo );
     }
 
     void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer                      commandBuffer,
                                          VkDeviceAddress                      scratch,
+                                         VkDeviceSize                         scratchSize,
                                          const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo );
+      return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo );
     }
 
-    void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer,
+                                              VkDeviceAddress scratch,
+                                              VkDeviceSize    scratchSize,
+                                              VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo );
+      return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo );
     }
 
 #  endif /*VK_ENABLE_BETA_EXTENSIONS*/
diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h
index df18b40..f5e9475 100644
--- a/include/vulkan/vulkan_beta.h
+++ b/include/vulkan/vulkan_beta.h
@@ -53,13 +53,14 @@ typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
 // VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls.
 #define VK_AMDX_shader_enqueue 1
-#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1
+#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 2
 #define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue"
 #define VK_SHADER_INDEX_UNUSED_AMDX       (~0U)
 
 typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX {
     VkStructureType    sType;
     void*              pNext;
     VkBool32           shaderEnqueue;
+    VkBool32           shaderMeshEnqueue;
 } VkPhysicalDeviceShaderEnqueueFeaturesAMDX;
 
 typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX {
@@ -70,12 +71,16 @@ typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX {
     uint32_t           maxExecutionGraphShaderPayloadSize;
     uint32_t           maxExecutionGraphShaderPayloadCount;
     uint32_t           executionGraphDispatchAddressAlignment;
+    uint32_t           maxExecutionGraphWorkgroupCount[3];
+    uint32_t           maxExecutionGraphWorkgroups;
 } VkPhysicalDeviceShaderEnqueuePropertiesAMDX;
 
 typedef struct VkExecutionGraphPipelineScratchSizeAMDX {
     VkStructureType    sType;
     void*              pNext;
-    VkDeviceSize       size;
+    VkDeviceSize       minSize;
+    VkDeviceSize       maxSize;
+    VkDeviceSize       sizeGranularity;
 } VkExecutionGraphPipelineScratchSizeAMDX;
 
 typedef struct VkExecutionGraphPipelineCreateInfoAMDX {
@@ -116,12 +121,12 @@ typedef struct VkPipelineShaderStageNodeCreateInfoAMDX {
 } VkPipelineShaderStageNodeCreateInfoAMDX;
 
 typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
-typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo);
-typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex);
-typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch);
-typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo);
-typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo);
-typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex);
+typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo);
 
 #ifndef VK_NO_PROTOTYPES
 VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX(
@@ -145,21 +150,26 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX(
 VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX(
     VkCommandBuffer                             commandBuffer,
-    VkDeviceAddress                             scratch);
+    VkPipeline                                  executionGraph,
+    VkDeviceAddress                             scratch,
+    VkDeviceSize                                scratchSize);
 
 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX(
     VkCommandBuffer                             commandBuffer,
     VkDeviceAddress                             scratch,
+    VkDeviceSize                                scratchSize,
     const VkDispatchGraphCountInfoAMDX*         pCountInfo);
 
 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX(
     VkCommandBuffer                             commandBuffer,
     VkDeviceAddress                             scratch,
+    VkDeviceSize                                scratchSize,
     const VkDispatchGraphCountInfoAMDX*         pCountInfo);
 
 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX(
     VkCommandBuffer                             commandBuffer,
     VkDeviceAddress                             scratch,
+    VkDeviceSize                                scratchSize,
     VkDeviceAddress                             countInfo);
 #endif
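Not part of the commit itself, but as orientation for the spec-version-2 rework above: a minimal sketch of how the updated AMDX entry points fit together. The names device, cmdBuf, executionGraph and scratchAddr are placeholders for objects assumed to already exist, and VK_ENABLE_BETA_EXTENSIONS must be defined.

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    // Sketch only; error handling omitted.
    VkExecutionGraphPipelineScratchSizeAMDX sizeInfo = {};
    sizeInfo.sType = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX;
    vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, &sizeInfo );

    // v2 reports a size range instead of a single size; presumably any multiple
    // of sizeGranularity between minSize and maxSize is usable.
    VkDeviceSize scratchSize = sizeInfo.minSize;

    // v2 also takes the pipeline and the chosen size when initializing scratch.
    vkCmdInitializeGraphScratchMemoryAMDX( cmdBuf, executionGraph, scratchAddr, scratchSize );

    VkDispatchGraphCountInfoAMDX countInfo = {};  // node dispatch info goes here
    vkCmdDispatchGraphAMDX( cmdBuf, scratchAddr, scratchSize, &countInfo );
    #endif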
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index 1dc7e23..d8d673f 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -69,7 +69,7 @@ extern "C" {
 #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
 
 // Version of this file
-#define VK_HEADER_VERSION 297
+#define VK_HEADER_VERSION 298
 
 // Complete version of this file
 #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -11198,6 +11198,9 @@ typedef VkFlags64 VkPipelineCreateFlagBits2KHR;
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL;
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL;
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL;
+#endif
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL;
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL;
 static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL;
@@ -11243,7 +11246,9 @@ static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL;
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL;
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL;
+#endif
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL;
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL;
 static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL;
@@ -19641,7 +19646,7 @@ typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT {
 
 typedef struct VkGeneratedCommandsMemoryRequirementsInfoEXT {
     VkStructureType                sType;
-    void*                          pNext;
+    const void*                    pNext;
     VkIndirectExecutionSetEXT      indirectExecutionSet;
     VkIndirectCommandsLayoutEXT    indirectCommandsLayout;
     uint32_t                       maxSequenceCount;
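The vulkan_core.h hunks above put the two AMDX flag bits behind VK_ENABLE_BETA_EXTENSIONS guards. A hypothetical sketch of creating an execution-graph scratch buffer with those bits via the maintenance5 VkBufferUsageFlags2CreateInfoKHR chain; device and scratchSize are placeholders:

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    VkBufferUsageFlags2CreateInfoKHR usage2 = {};
    usage2.sType = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR;
    usage2.usage = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX |
                   VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR;

    VkBufferCreateInfo bufferInfo = {};
    bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufferInfo.pNext = &usage2;  // with maintenance5, this overrides bufferInfo.usage
    bufferInfo.size  = scratchSize;

    VkBuffer scratch = VK_NULL_HANDLE;
    vkCreateBuffer( device, &bufferInfo, nullptr, &scratch );
    #endif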
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
index 92e2bf9..44eef6a 100644
--- a/include/vulkan/vulkan_enums.hpp
+++ b/include/vulkan/vulkan_enums.hpp
@@ -7025,9 +7025,12 @@ namespace VULKAN_HPP_NAMESPACE
 
   enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR
   {
-    eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR,
-    eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR,
-    eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR,
+    eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR,
+    eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR,
+    eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT,
     eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
     eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR,
@@ -7068,8 +7071,11 @@ namespace VULKAN_HPP_NAMESPACE
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
     static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags =
-      PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative |
-      PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex |
+      PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | PipelineCreateFlagBits2KHR::eExecutionGraphAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex |
       PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics |
       PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired |
       PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT |
diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp
index e79c569..3b158f8 100644
--- a/include/vulkan/vulkan_extension_inspection.hpp
+++ b/include/vulkan/vulkan_extension_inspection.hpp
@@ -994,16 +994,14 @@ namespace VULKAN_HPP_NAMESPACE
       { "VK_AMDX_shader_enqueue",
         { { "VK_VERSION_1_0",
             { {
-              "VK_KHR_get_physical_device_properties2",
-            } } },
-          { "VK_VERSION_1_1",
-            { { "VK_KHR_synchronization2",
+              "VK_KHR_spirv_1_4",
+              "VK_EXT_extended_dynamic_state",
             } } },
           { "VK_VERSION_1_3",
             { {
+              "VK_KHR_maintenance5",
               "VK_KHR_pipeline_library",
-              "VK_KHR_spirv_1_4",
             } } } } },
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
       { "VK_EXT_inline_uniform_block",
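The extension-inspection update rewrites the dependency table of VK_AMDX_shader_enqueue: on pre-1.3 API versions it now lists VK_KHR_spirv_1_4 and VK_EXT_extended_dynamic_state, while on 1.3+ it lists VK_KHR_maintenance5 and VK_KHR_pipeline_library. A hedged sketch of device creation against a 1.3 physical device (queue setup and the feature chain omitted; all variable names are placeholders):

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    const char * deviceExtensions[] = {
      VK_KHR_MAINTENANCE_5_EXTENSION_NAME,
      VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME,
      VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME,
    };

    VkDeviceCreateInfo deviceInfo = {};
    deviceInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    deviceInfo.enabledExtensionCount   = 3;
    deviceInfo.ppEnabledExtensionNames = deviceExtensions;
    // ... fill in queue create infos, then call vkCreateDevice.
    #endif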
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index 95ffd72..b07c66b 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -79,7 +79,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -102,7 +102,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+    return static_cast<Result>(
+      d.vkEnumeratePhysicalDevices( static_cast<VkInstance>( m_instance ), pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -175,7 +176,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+    d.vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -201,7 +202,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+    d.vkGetPhysicalDeviceFormatProperties(
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -231,7 +233,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                                             static_cast<VkFormat>( format ),
                                                                             static_cast<VkImageType>( type ),
                                                                             static_cast<VkImageTiling>( tiling ),
@@ -275,7 +277,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+    d.vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -302,7 +304,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
     d.vkGetPhysicalDeviceQueueFamilyProperties(
-      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -363,7 +365,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+    d.vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                           reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -387,7 +390,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return d.vkGetInstanceProcAddr( m_instance, pName );
+    return d.vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), pName );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -409,7 +412,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return d.vkGetDeviceProcAddr( m_device, pName );
+    return d.vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), pName );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -434,7 +437,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
+    return static_cast<Result>( d.vkCreateDevice( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                   reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                   reinterpret_cast<VkDevice *>( pDevice ) ) );
@@ -491,7 +494,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -597,8 +600,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -753,7 +756,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -827,7 +831,7 @@ namespace VULKAN_HPP_NAMESPACE
     Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+    d.vkGetDeviceQueue( static_cast<VkDevice>( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -854,7 +858,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+    return static_cast<Result>(
+      d.vkQueueSubmit( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -880,7 +885,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+    return static_cast<Result>( d.vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) );
   }
 #else
   template <typename Dispatch>
@@ -903,7 +908,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+    return static_cast<Result>( d.vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) );
   }
 #else
   template <typename Dispatch>
@@ -928,7 +933,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkAllocateMemory( m_device,
+    return static_cast<Result>( d.vkAllocateMemory( static_cast<VkDevice>( m_device ),
                                                     reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
@@ -989,7 +994,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1015,7 +1020,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1044,7 +1049,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkMapMemory( m_device,
+    return static_cast<Result>( d.vkMapMemory( static_cast<VkDevice>( m_device ),
                                                static_cast<VkDeviceMemory>( memory ),
                                                static_cast<VkDeviceSize>( offset ),
                                                static_cast<VkDeviceSize>( size ),
@@ -1082,7 +1087,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+    d.vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ) );
   }
 
   template <typename Dispatch>
@@ -1091,7 +1096,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+    return static_cast<Result>(
+      d.vkFlushMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1120,7 +1126,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
     return static_cast<Result>(
-      d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+      d.vkInvalidateMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1148,7 +1154,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+    d.vkGetDeviceMemoryCommitment(
+      static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1176,8 +1183,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return static_cast<Result>( d.vkBindBufferMemory(
+      static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
   }
 #else
   template <typename Dispatch>
@@ -1205,8 +1212,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return static_cast<Result>( d.vkBindImageMemory(
+      static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
   }
 #else
   template <typename Dispatch>
@@ -1232,7 +1239,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+    d.vkGetBufferMemoryRequirements(
+      static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1258,7 +1266,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+    d.vkGetImageMemoryRequirements(
+      static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1285,7 +1294,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetImageSparseMemoryRequirements( m_device,
+    d.vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
                                           static_cast<VkImage>( image ),
                                           pSparseMemoryRequirementCount,
                                           reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
@@ -1363,7 +1372,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                       static_cast<VkFormat>( format ),
                                                       static_cast<VkImageType>( type ),
                                                       static_cast<VkSampleCountFlagBits>( samples ),
@@ -1473,8 +1482,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+    return static_cast<Result>( d.vkQueueBindSparse(
+      static_cast<VkQueue>( m_queue ), bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1502,7 +1511,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateFence( m_device,
+    return static_cast<Result>( d.vkCreateFence( static_cast<VkDevice>( m_device ),
                                                  reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkFence *>( pFence ) ) );
@@ -1559,7 +1568,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1585,7 +1594,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1611,7 +1620,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+    return static_cast<Result>( d.vkResetFences( static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1637,7 +1646,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return static_cast<Result>( d.vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ) ) );
   }
 #else
   template <typename Dispatch>
@@ -1664,8 +1673,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+    return static_cast<Result>( d.vkWaitForFences(
+      static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1697,7 +1706,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateSemaphore( m_device,
+    return static_cast<Result>( d.vkCreateSemaphore( static_cast<VkDevice>( m_device ),
                                                      reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
@@ -1758,7 +1767,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroySemaphore(
+      static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1784,7 +1794,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroySemaphore(
+      static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1811,7 +1822,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateEvent( m_device,
+    return static_cast<Result>( d.vkCreateEvent( static_cast<VkDevice>( m_device ),
                                                  reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkEvent *>( pEvent ) ) );
@@ -1868,7 +1879,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1894,7 +1905,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1919,7 +1930,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+    return static_cast<Result>( d.vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
   }
 #else
   template <typename Dispatch>
@@ -1943,7 +1954,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return static_cast<Result>( d.vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
  }
 #else
   template <typename Dispatch>
@@ -1967,7 +1978,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return static_cast<Result>( d.vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
   }
 #else
   template <typename Dispatch>
@@ -1992,7 +2003,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateQueryPool( m_device,
+    return static_cast<Result>( d.vkCreateQueryPool( static_cast<VkDevice>( m_device ),
                                                      reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
@@ -2053,7 +2064,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyQueryPool(
+      static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2079,7 +2091,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyQueryPool(
+      static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2110,7 +2123,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+    return static_cast<Result>( d.vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
                                                          static_cast<VkQueryPool>( queryPool ),
                                                          firstQuery,
                                                          queryCount,
@@ -2189,7 +2202,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateBuffer( m_device,
+    return static_cast<Result>( d.vkCreateBuffer( static_cast<VkDevice>( m_device ),
                                                   reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                   reinterpret_cast<VkBuffer *>( pBuffer ) ) );
@@ -2246,7 +2259,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2272,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2299,7 +2312,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateBufferView( m_device,
+    return static_cast<Result>( d.vkCreateBufferView( static_cast<VkDevice>( m_device ),
                                                       reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkBufferView *>( pView ) ) );
@@ -2360,7 +2373,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyBufferView(
+      static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2386,7 +2400,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyBufferView(
+      static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2413,7 +2428,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateImage( m_device,
+    return static_cast<Result>( d.vkCreateImage( static_cast<VkDevice>( m_device ),
                                                  reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkImage *>( pImage ) ) );
@@ -2470,7 +2485,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2496,7 +2511,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2523,7 +2538,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetImageSubresourceLayout( m_device,
+    d.vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ),
                                    static_cast<VkImage>( image ),
                                    reinterpret_cast<const VkImageSubresource *>( pSubresource ),
                                    reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
@@ -2556,7 +2571,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateImageView( m_device,
+    return static_cast<Result>( d.vkCreateImageView( static_cast<VkDevice>( m_device ),
                                                      reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkImageView *>( pView ) ) );
@@ -2617,7 +2632,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyImageView(
+      static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2643,7 +2659,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyImageView(
+      static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2670,7 +2687,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateShaderModule( m_device,
+    return static_cast<Result>( d.vkCreateShaderModule( static_cast<VkDevice>( m_device ),
                                                         reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
@@ -2731,7 +2748,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyShaderModule(
+      static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2757,7 +2775,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyShaderModule(
+      static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2784,7 +2803,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreatePipelineCache( m_device,
+    return static_cast<Result>( d.vkCreatePipelineCache( static_cast<VkDevice>( m_device ),
                                                          reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
@@ -2845,7 +2864,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipelineCache(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2871,7 +2891,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipelineCache(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2898,7 +2919,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+    return static_cast<Result>(
+      d.vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2976,8 +2998,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+    return static_cast<Result>( d.vkMergePipelineCaches(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3009,7 +3031,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( static_cast<VkDevice>( m_device ),
                                                              static_cast<VkPipelineCache>( pipelineCache ),
                                                              createInfoCount,
                                                              reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
@@ -3212,7 +3234,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateComputePipelines( m_device,
+    return static_cast<Result>( d.vkCreateComputePipelines( static_cast<VkDevice>( m_device ),
                                                             static_cast<VkPipelineCache>( pipelineCache ),
                                                             createInfoCount,
                                                             reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
@@ -3412,7 +3434,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipeline(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3438,7 +3461,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipeline(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3465,7 +3489,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
+    return static_cast<Result>( d.vkCreatePipelineLayout( static_cast<VkDevice>( m_device ),
                                                           reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
@@ -3526,7 +3550,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipelineLayout(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3552,7 +3577,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyPipelineLayout(
+      static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3579,7 +3605,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateSampler( m_device,
+    return static_cast<Result>( d.vkCreateSampler( static_cast<VkDevice>( m_device ),
                                                    reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkSampler *>( pSampler ) ) );
@@ -3636,7 +3662,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3662,7 +3688,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3689,7 +3715,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( static_cast<VkDevice>( m_device ),
                                                                reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
@@ -3750,8 +3776,9 @@ namespace VULKAN_HPP_NAMESPACE
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDescriptorSetLayout(
-      m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
+                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3778,8 +3805,9 @@ namespace VULKAN_HPP_NAMESPACE
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDescriptorSetLayout(
-      m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
+                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3807,7 +3835,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
+    return static_cast<Result>( d.vkCreateDescriptorPool( static_cast<VkDevice>( m_device ),
                                                           reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
@@ -3868,7 +3896,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDescriptorPool(
+      static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3894,7 +3923,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDescriptorPool(
+      static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3921,8 +3951,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+    return static_cast<Result>( d.vkResetDescriptorPool(
+      static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
   }
 #else
   template <typename Dispatch>
@@ -3945,8 +3975,9 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkAllocateDescriptorSets(
-      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+    return static_cast<Result>( d.vkAllocateDescriptorSets( static_cast<VkDevice>( m_device ),
+                                                            reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
+                                                            reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4052,8 +4083,10 @@ namespace VULKAN_HPP_NAMESPACE
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkFreeDescriptorSets(
-      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+    return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkDescriptorPool>( descriptorPool ),
+                                                        descriptorSetCount,
+                                                        reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4079,8 +4112,10 @@ namespace VULKAN_HPP_NAMESPACE
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkFreeDescriptorSets(
-      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+    return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkDescriptorPool>( descriptorPool ),
+                                                        descriptorSetCount,
+                                                        reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4107,7 +4142,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkUpdateDescriptorSets( m_device,
+    d.vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ),
                               descriptorWriteCount,
                               reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
                               descriptorCopyCount,
@@ -4141,7 +4176,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateFramebuffer( m_device,
+    return static_cast<Result>( d.vkCreateFramebuffer( static_cast<VkDevice>( m_device ),
                                                        reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
@@ -4202,7 +4237,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyFramebuffer(
+      static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4228,7 +4264,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyFramebuffer(
+      static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4255,7 +4292,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateRenderPass( m_device,
+    return static_cast<Result>( d.vkCreateRenderPass(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); @@ -4316,7 +4353,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4342,7 +4380,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4368,7 +4407,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); + d.vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4395,7 +4434,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateCommandPool( m_device, + return static_cast<Result>( d.vkCreateCommandPool( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); @@ -4456,7 +4495,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4482,7 +4522,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4509,7 +4550,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkResetCommandPool( m_device, 
static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); + return static_cast<Result>( + d.vkResetCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); } #else template <typename Dispatch> @@ -4535,8 +4577,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAllocateCommandBuffers( - m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); + return static_cast<Result>( d.vkAllocateCommandBuffers( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), + reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4642,8 +4685,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ), + static_cast<VkCommandPool>( commandPool ), + commandBufferCount, + reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4669,8 +4714,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ), + static_cast<VkCommandPool>( commandPool ), + commandBufferCount, + reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4694,7 +4741,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); + return static_cast<Result>( + d.vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4720,7 +4768,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); + return static_cast<Result>( d.vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) ); } #else template <typename Dispatch> @@ -4744,7 +4792,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); + return static_cast<Result>( d.vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) ); } #else template <typename 
Dispatch> @@ -4769,7 +4817,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); + d.vkCmdBindPipeline( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); } template <typename Dispatch> @@ -4779,7 +4828,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + d.vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4804,7 +4853,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + d.vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4826,7 +4875,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + d.vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth ); } template <typename Dispatch> @@ -4834,21 +4883,21 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + d.vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + d.vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + d.vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds ); } template <typename Dispatch> @@ -4856,7 +4905,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); + d.vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); } template <typename Dispatch> @@ -4864,7 +4913,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); + d.vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); } template <typename Dispatch> @@ -4872,7 +4921,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference ); + d.vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference ); } template <typename Dispatch> @@ -4886,7 +4935,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets( m_commandBuffer, + d.vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, @@ -4928,7 +4977,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) ); + d.vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkIndexType>( indexType ) ); } template <typename Dispatch> @@ -4939,8 +4991,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers( - m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); + d.vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast<const VkBuffer *>( pBuffers ), + reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4976,7 +5031,7 @@ namespace VULKAN_HPP_NAMESPACE uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + d.vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); } template <typename Dispatch> @@ -4988,7 +5043,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + d.vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } template <typename Dispatch> @@ -4999,7 +5054,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + d.vkCmdDrawIndirect( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); } template <typename Dispatch> @@ -5010,7 +5066,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + d.vkCmdDrawIndexedIndirect( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); } template <typename Dispatch> @@ -5018,7 +5075,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template <typename Dispatch> @@ -5027,7 +5084,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); + d.vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); } template <typename Dispatch> @@ -5038,7 +5095,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer( m_commandBuffer, + d.vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, @@ -5075,7 +5132,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage( m_commandBuffer, + d.vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), @@ -5119,7 +5176,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage( m_commandBuffer, + d.vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), @@ -5164,7 +5221,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage( m_commandBuffer, + d.vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), @@ -5203,7 +5260,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer( m_commandBuffer, + d.vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), @@ -5241,8 +5298,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdateBuffer( - m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData ); + d.vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + static_cast<VkDeviceSize>( dataSize ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5273,7 +5333,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data ); + d.vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + static_cast<VkDeviceSize>( size ), + data ); } template <typename Dispatch> @@ -5285,7 +5349,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearColorImage( m_commandBuffer, + d.vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), @@ -5324,7 +5388,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearDepthStencilImage( m_commandBuffer, + d.vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), @@ -5363,7 +5427,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearAttachments( m_commandBuffer, + d.vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ), attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, @@ -5399,7 +5463,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage( m_commandBuffer, + d.vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), @@ -5438,7 +5502,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); + d.vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); } template <typename Dispatch> @@ -5447,7 +5511,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); + d.vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); } template <typename Dispatch> @@ -5464,7 +5528,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents( m_commandBuffer, + d.vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ), eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), @@ -5520,7 +5584,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier( m_commandBuffer, + d.vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), @@ -5568,14 +5632,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); + d.vkCmdBeginQuery( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query ); + d.vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query ); } template <typename Dispatch> @@ -5585,7 +5650,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + d.vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); } template <typename Dispatch> @@ -5595,7 +5660,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query ); + d.vkCmdWriteTimestamp( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query ); } template <typename Dispatch> @@ 
-5609,7 +5675,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyQueryPoolResults( m_commandBuffer, + d.vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, @@ -5628,7 +5694,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues ); + d.vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineLayout>( layout ), + static_cast<VkShaderStageFlags>( stageFlags ), + offset, + size, + pValues ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5659,7 +5730,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) ); + d.vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), + static_cast<VkSubpassContents>( contents ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5681,14 +5754,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) ); + d.vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass( m_commandBuffer ); + d.vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) ); } template <typename Dispatch> @@ -5697,7 +5770,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + d.vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5746,7 +5819,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); + return static_cast<Result>( + d.vkBindBufferMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5773,7 +5847,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindImageMemory2( 
m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); + return static_cast<Result>( + d.vkBindImageMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5803,7 +5878,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeatures( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); + static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5829,7 +5904,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + d.vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask ); } template <typename Dispatch> @@ -5842,7 +5917,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } template <typename Dispatch> @@ -5852,8 +5927,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); + return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5935,8 +6011,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements2( - m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5982,8 +6059,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements2( - m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6030,7 +6108,7 @@ namespace 
VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements2( m_device, + d.vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); @@ -6105,7 +6183,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6148,7 +6226,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); + d.vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6192,7 +6270,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6237,7 +6316,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); } @@ -6292,7 +6371,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2( - m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6428,7 +6507,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), + 
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6474,7 +6554,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); @@ -6550,7 +6630,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); + d.vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); } template <typename Dispatch> @@ -6559,7 +6639,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); + d.vkGetDeviceQueue2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6587,7 +6667,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, + return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); @@ -6650,8 +6730,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversion( - m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6679,8 +6760,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversion( - m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6710,7 +6792,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( 
m_device, + return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); @@ -6774,8 +6856,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplate( - m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6803,8 +6886,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplate( - m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6833,8 +6917,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSetWithTemplate( - m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorSet>( descriptorSet ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6863,7 +6949,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); } @@ -6895,7 +6981,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); } @@ -6928,7 +7014,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( 
pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); } @@ -6960,8 +7046,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutSupport( - m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); + d.vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7013,7 +7100,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCount( m_commandBuffer, + d.vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -7032,7 +7119,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + d.vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -7048,7 +7135,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateRenderPass2( m_device, + return static_cast<Result>( d.vkCreateRenderPass2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); @@ -7109,8 +7196,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass2( - m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7135,8 +7223,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass2( - m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), + reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7160,7 +7249,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2( 
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7182,7 +7271,7 @@ namespace VULKAN_HPP_NAMESPACE Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); } template <typename Dispatch> @@ -7191,7 +7280,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); + return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7219,7 +7308,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); + return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7246,7 +7335,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); + return static_cast<Result>( d.vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7272,7 +7361,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7297,7 +7387,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); + return d.vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7322,7 +7412,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), + 
reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7350,8 +7441,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7434,7 +7525,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, + return static_cast<Result>( d.vkCreatePrivateDataSlot( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); @@ -7495,7 +7586,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyPrivateDataSlot( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7522,7 +7614,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyPrivateDataSlot( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7552,8 +7645,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + return static_cast<Result>( d.vkSetPrivateData( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); } #else template <typename Dispatch> @@ -7584,7 +7677,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); + d.vkGetPrivateData( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7612,7 +7706,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + d.vkCmdSetEvent2( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7636,7 +7731,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); + d.vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); } template <typename Dispatch> @@ -7646,8 +7741,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents2( - m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); + d.vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ), + eventCount, + reinterpret_cast<const VkEvent *>( pEvents ), + reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7681,7 +7778,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + d.vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7705,7 +7802,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); + d.vkCmdWriteTimestamp2( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); } template <typename Dispatch> @@ -7715,7 +7813,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); + return static_cast<Result>( + d.vkQueueSubmit2( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7741,7 +7840,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); + d.vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7762,7 +7861,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void 
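// ---- illustrative sketch (editorial addition, not part of the patch) --------
// The synchronization2 wrappers above (vkCmdSetEvent2 .. vkQueueSubmit2) all
// take the new *2 structures.  A minimal global barrier between a transfer
// write and a fragment-shader read, assuming `cmd` is in the recording state:
#include <vulkan/vulkan.hpp>

void transferToFragmentBarrier( vk::CommandBuffer cmd )
{
  vk::MemoryBarrier2 barrier{};
  barrier.srcStageMask  = vk::PipelineStageFlagBits2::eTransfer;
  barrier.srcAccessMask = vk::AccessFlagBits2::eTransferWrite;
  barrier.dstStageMask  = vk::PipelineStageFlagBits2::eFragmentShader;
  barrier.dstAccessMask = vk::AccessFlagBits2::eShaderRead;

  vk::DependencyInfo dependencyInfo{};
  dependencyInfo.memoryBarrierCount = 1;
  dependencyInfo.pMemoryBarriers    = &barrier;
  cmd.pipelineBarrier2( dependencyInfo );  // forwards to vkCmdPipelineBarrier2 as above
}
// -----------------------------------------------------------------------------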
CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); + d.vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7783,7 +7882,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); + d.vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7805,7 +7904,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); + d.vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7826,7 +7925,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); + d.vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7847,7 +7946,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); + d.vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7869,7 +7968,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); + d.vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7890,21 +7989,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRendering( m_commandBuffer ); + d.vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCullMode( 
m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); + d.vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); + d.vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); } template <typename Dispatch> @@ -7912,7 +8011,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); + d.vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); } template <typename Dispatch> @@ -7921,7 +8020,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + d.vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7944,7 +8043,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + d.vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7972,7 +8071,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2( m_commandBuffer, + d.vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), @@ -8028,21 +8127,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); + d.vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); + d.vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( 
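// ---- illustrative sketch (editorial addition, not part of the patch) --------
// The Vulkan 1.3 dynamic-state setters above replace state that used to be
// baked into the pipeline.  Assuming the bound pipeline declared the matching
// VK_DYNAMIC_STATE_* values (viewport extent chosen arbitrarily):
#include <vulkan/vulkan.hpp>

void setBaselineDynamicState( vk::CommandBuffer cmd )
{
  cmd.setViewportWithCount( vk::Viewport( 0.0f, 0.0f, 800.0f, 600.0f, 0.0f, 1.0f ) );
  cmd.setScissorWithCount( vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 800, 600 ) ) );
  cmd.setCullMode( vk::CullModeFlagBits::eBack );
  cmd.setFrontFace( vk::FrontFace::eCounterClockwise );
  cmd.setPrimitiveTopology( vk::PrimitiveTopology::eTriangleList );
}
// -----------------------------------------------------------------------------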
VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); + d.vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); } template <typename Dispatch> @@ -8050,14 +8149,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); + d.vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); + d.vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); } template <typename Dispatch> @@ -8069,7 +8168,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOp( m_commandBuffer, + d.vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), @@ -8082,14 +8181,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); + d.vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); + d.vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); } template <typename Dispatch> @@ -8097,7 +8196,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); + d.vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); } template <typename Dispatch> @@ -8106,8 +8205,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceBufferMemoryRequirements( - m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceBufferMemoryRequirements 
*>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8153,8 +8253,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageMemoryRequirements( - m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8201,7 +8302,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirements( m_device, + d.vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); @@ -8280,7 +8381,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySurfaceKHR( + static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8306,7 +8408,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySurfaceKHR( + static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8334,7 +8437,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( - m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8362,8 +8465,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8392,8 +8496,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
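// ---- illustrative sketch (editorial addition, not part of the patch) --------
// vkGetDeviceBufferMemoryRequirements (wrapped above) reports requirements for
// a buffer that has not been created yet, which is handy for sizing allocators
// up front.  Sketch only; the 64 KiB size and the usage flag are arbitrary:
#include <vulkan/vulkan.hpp>

vk::MemoryRequirements2 queryStorageBufferRequirements( vk::Device device )
{
  vk::BufferCreateInfo bufferInfo{};
  bufferInfo.size  = 64 * 1024;
  bufferInfo.usage = vk::BufferUsageFlagBits::eStorageBuffer;

  vk::DeviceBufferMemoryRequirements query{};
  query.pCreateInfo = &bufferInfo;
  return device.getBufferMemoryRequirements( query );  // no VkBuffer is created
}
// -----------------------------------------------------------------------------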
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( - m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + pSurfaceFormatCount, + reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8473,8 +8579,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( - m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + pPresentModeCount, + reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8556,7 +8664,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, + return static_cast<Result>( d.vkCreateSwapchainKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); @@ -8617,7 +8725,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySwapchainKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8643,7 +8752,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySwapchainKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8670,8 +8780,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); + return static_cast<Result>( d.vkGetSwapchainImagesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8751,8 +8861,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
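// ---- illustrative sketch (editorial addition, not part of the patch) --------
// Typical use of the surface-format query wrapped above when choosing a
// swapchain format (physical device and surface acquired elsewhere; enhanced
// mode returns a std::vector directly):
#include <vector>
#include <vulkan/vulkan.hpp>

vk::SurfaceFormatKHR pickSurfaceFormat( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  for ( auto const & f : formats )
    if ( f.format == vk::Format::eB8G8R8A8Srgb && f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
      return f;
  return formats.front();  // fall back to whatever the implementation lists first
}
// -----------------------------------------------------------------------------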
VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquireNextImageKHR( - m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) ); + return static_cast<Result>( d.vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ), + static_cast<VkSwapchainKHR>( swapchain ), + timeout, + static_cast<VkSemaphore>( semaphore ), + static_cast<VkFence>( fence ), + pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8787,7 +8901,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); + return static_cast<Result>( d.vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8814,8 +8928,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); + return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8845,7 +8959,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( - m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); + static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8875,8 +8989,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); + return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8955,7 +9069,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); + return static_cast<Result>( + d.vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8990,8 +9105,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - 
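// ---- illustrative sketch (editorial addition, not part of the patch) --------
// Acquire/present skeleton over the wrappers above.  Creation of the swapchain,
// semaphore and queue is elided, and real code must also handle
// eSuboptimalKHR / eErrorOutOfDateKHR by recreating the swapchain:
#include <cstdint>
#include <vulkan/vulkan.hpp>

void acquireAndPresent( vk::Device device, vk::Queue queue, vk::SwapchainKHR swapchain, vk::Semaphore imageAcquired )
{
  uint32_t imageIndex = 0;
  vk::Result acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAcquired, nullptr, &imageIndex );

  // ... record and submit rendering for imageIndex here ...

  vk::PresentInfoKHR presentInfo{};
  presentInfo.swapchainCount = 1;
  presentInfo.pSwapchains    = &swapchain;
  presentInfo.pImageIndices  = &imageIndex;
  vk::Result presented = queue.presentKHR( presentInfo );
  (void)acquired;
  (void)presented;
}
// -----------------------------------------------------------------------------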
d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9067,8 +9182,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9148,8 +9263,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); + return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9225,8 +9340,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( - m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayKHR>( display ), + pPropertyCount, + reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9310,7 +9427,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, + return static_cast<Result>( d.vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), @@ -9378,8 +9495,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( - m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); + return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayModeKHR>( mode ), + planeIndex, + reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9408,7 +9527,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
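// ---- illustrative sketch (editorial addition, not part of the patch) --------
// The display getters above follow Vulkan's standard two-call enumeration
// pattern; shown against the C entry point for clarity (requires VK_KHR_display
// to be enabled on the instance):
#include <vector>
#include <vulkan/vulkan.h>

std::vector<VkDisplayPropertiesKHR> enumerateDisplays( VkPhysicalDevice physicalDevice )
{
  uint32_t count = 0;
  vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, &count, nullptr );            // first call: count only
  std::vector<VkDisplayPropertiesKHR> properties( count );
  vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, &count, properties.data() );  // second call: fill
  return properties;
}
// -----------------------------------------------------------------------------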
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -9473,7 +9592,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( m_device ), swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), @@ -9654,7 +9773,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateXlibSurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -9714,7 +9833,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9745,7 +9865,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateXcbSurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -9807,7 +9927,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9840,7 +9961,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -9901,7 +10022,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, display ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9932,7 +10054,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -9998,7 +10120,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, + return static_cast<Result>( d.vkCreateWin32SurfaceKHR( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -10057,7 +10179,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); + return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex ) ); } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -10071,7 +10193,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, + return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); @@ -10133,7 +10255,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDebugReportCallbackEXT( - m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10161,7 +10283,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDebugReportCallbackEXT( - m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkInstance>( m_instance ), 
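// ---- illustrative sketch (editorial addition, not part of the patch) --------
// VK_EXT_debug_report create/destroy as wrapped above.  The extension is
// deprecated in favor of VK_EXT_debug_utils, and its entry points are not
// exported by the loader, so this assumes a dynamic default dispatcher that was
// initialized from the instance:
#include <cstdio>
#include <vulkan/vulkan.hpp>

VKAPI_ATTR VkBool32 VKAPI_CALL reportCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
                                               uint64_t, size_t, int32_t,
                                               const char *, const char * message, void * )
{
  std::fprintf( stderr, "debug report: %s\n", message );
  return VK_FALSE;  // do not abort the call that triggered the report
}

vk::DebugReportCallbackEXT installReportCallback( vk::Instance instance )
{
  vk::DebugReportCallbackCreateInfoEXT createInfo{};
  createInfo.flags       = vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning;
  createInfo.pfnCallback = reportCallback;
  return instance.createDebugReportCallbackEXT( createInfo );
}
// -----------------------------------------------------------------------------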
static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10193,7 +10315,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDebugReportMessageEXT( m_instance, + d.vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), object, @@ -10237,7 +10359,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); + return static_cast<Result>( + d.vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10263,7 +10386,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); + return static_cast<Result>( + d.vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10289,7 +10413,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); + d.vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10310,7 +10434,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + d.vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); } template <typename Dispatch> @@ -10318,7 +10442,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); + d.vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10343,8 +10467,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( - m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), + 
reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10392,7 +10517,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), pVideoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); @@ -10486,7 +10611,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, + return static_cast<Result>( d.vkCreateVideoSessionKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); @@ -10547,7 +10672,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyVideoSessionKHR( + static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10574,7 +10700,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyVideoSessionKHR( + static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10603,7 +10730,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), pMemoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); @@ -10698,7 +10825,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, + return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), bindSessionMemoryInfoCount, reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); @@ -10735,7 +10862,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( 
d.vkCreateVideoSessionParametersKHR( m_device, + return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); @@ -10798,7 +10925,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, + return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); } @@ -10831,8 +10958,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionParametersKHR( - m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10859,8 +10987,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionParametersKHR( - m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10886,7 +11015,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); + d.vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10908,7 +11037,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); + d.vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10930,7 +11059,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); + d.vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ 
-10954,7 +11084,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); + d.vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10982,7 +11112,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + d.vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), @@ -11034,7 +11164,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + d.vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), @@ -11078,7 +11208,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + d.vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), @@ -11122,7 +11252,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); + d.vkCmdBeginQueryIndexedEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); } template <typename Dispatch> @@ -11130,7 +11261,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index ); + d.vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index ); } template <typename Dispatch> @@ -11143,7 +11274,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, + d.vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), @@ -11161,7 +11292,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, + return static_cast<Result>( d.vkCreateCuModuleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 
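// ---- illustrative sketch (editorial addition, not part of the patch) --------
// The VK_KHR_video_queue commands wrapped above bracket all decode work in a
// begin/control/end scope.  Everything here is illustrative only: a real
// recording needs a created video session, session parameters, reference-slot
// setup, and the extension entry points loaded into the dispatcher:
#include <vulkan/vulkan.hpp>

void recordVideoCodingScope( vk::CommandBuffer                     cmd,
                             vk::VideoBeginCodingInfoKHR const &   beginInfo,    // populated by the caller
                             vk::VideoCodingControlInfoKHR const & controlInfo,  // e.g. an initial session reset
                             vk::VideoEndCodingInfoKHR const &     endInfo )
{
  cmd.beginVideoCodingKHR( beginInfo );
  cmd.controlVideoCodingKHR( controlInfo );
  cmd.endVideoCodingKHR( endInfo );
}
// -----------------------------------------------------------------------------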
reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); @@ -11223,7 +11354,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, + return static_cast<Result>( d.vkCreateCuFunctionNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); @@ -11284,7 +11415,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCuModuleNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11310,7 +11442,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCuModuleNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11336,7 +11469,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCuFunctionNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11362,7 +11496,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCuFunctionNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11387,7 +11522,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); + d.vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11411,7 +11546,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); + return d.vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11436,8 +11571,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetImageViewAddressNVX( + static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11471,7 +11606,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + d.vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -11490,7 +11625,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + d.vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -11510,7 +11645,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, + return static_cast<Result>( d.vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), @@ -11616,7 +11751,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); + d.vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11637,7 +11772,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderingKHR( m_commandBuffer ); + d.vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) ); } #if defined( VK_USE_PLATFORM_GGP ) @@ -11651,7 +11786,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, + return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -11722,7 +11857,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, + 
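// ---- illustrative sketch (editorial addition, not part of the patch) --------
// vkCmdDrawIndirectCountAMD (wrapped above) has the same call shape as core
// vkCmdDrawIndirectCount.  `argsBuffer` is assumed to hold packed
// VkDrawIndirectCommand records and `countBuffer` a single uint32_t draw count
// at offset 0; requires VK_AMD_draw_indirect_count and a dispatcher that
// loaded it:
#include <vulkan/vulkan.hpp>

void drawWithGpuCount( vk::CommandBuffer cmd, vk::Buffer argsBuffer, vk::Buffer countBuffer, uint32_t maxDraws )
{
  cmd.drawIndirectCountAMD( argsBuffer, 0, countBuffer, 0, maxDraws, sizeof( VkDrawIndirectCommand ) );
}
// -----------------------------------------------------------------------------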
d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), @@ -11775,8 +11910,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); + return static_cast<Result>( d.vkGetMemoryWin32HandleNV( + static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11806,7 +11941,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11849,7 +11984,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); + d.vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11894,7 +12029,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFormatProperties2KHR( - m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11941,7 +12076,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); } @@ -11996,7 +12131,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12132,7 +12267,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, 
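// ---- illustrative sketch (editorial addition, not part of the patch) --------
// pNext-chained feature query through vkGetPhysicalDeviceFeatures2KHR as
// wrapped above (on Vulkan 1.1+ the unsuffixed call is core):
#include <vulkan/vulkan.hpp>

bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceVulkan12Features features12{};
  vk::PhysicalDeviceFeatures2        features2{};
  features2.pNext = &features12;  // chain the Vulkan 1.2 feature block
  physicalDevice.getFeatures2( &features2 );
  return features12.timelineSemaphore == VK_TRUE;
}
// -----------------------------------------------------------------------------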
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12178,7 +12314,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); @@ -12260,7 +12396,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeaturesKHR( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); + static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12286,7 +12422,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + d.vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask ); } template <typename Dispatch> @@ -12299,7 +12435,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } #if defined( VK_USE_PLATFORM_VI_NN ) @@ -12312,7 +12448,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, + return static_cast<Result>( d.vkCreateViSurfaceNN( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -12376,7 +12512,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); + d.vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); } //=== VK_KHR_device_group_creation === @@ -12388,8 +12524,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( 
pPhysicalDeviceGroupProperties ) ) ); + return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12473,7 +12610,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); } @@ -12508,8 +12645,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); + return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12539,7 +12676,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, + return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); @@ -12576,7 +12713,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); + return static_cast<Result>( d.vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12605,8 +12742,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( - m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); + return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + fd, + reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12637,7 +12776,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties 
       reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
   }
@@ -12671,8 +12810,8 @@ namespace VULKAN_HPP_NAMESPACE
       const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
+      static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12699,8 +12838,8 @@ namespace VULKAN_HPP_NAMESPACE
       const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
+      static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12730,7 +12869,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+    return static_cast<Result>(
+      d.vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12757,7 +12897,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+    return static_cast<Result>(
+      d.vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12790,7 +12931,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
+    d.vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
       static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
       static_cast<VkPipelineLayout>( layout ),
       set,
@@ -12829,8 +12970,11 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdPushDescriptorSetWithTemplateKHR(
-      m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+    d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      static_cast<VkPipelineLayout>( layout ),
+      set,
+      pData );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12862,7 +13006,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+    d.vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12883,7 +13028,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+    d.vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
   }

   //=== VK_KHR_descriptor_update_template ===
@@ -12896,7 +13041,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
@@ -12960,8 +13105,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDescriptorUpdateTemplateKHR(
-      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12990,8 +13136,10 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkUpdateDescriptorSetWithTemplateKHR(
-      m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+    d.vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkDescriptorSet>( descriptorSet ),
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      pData );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13023,7 +13171,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+    d.vkCmdSetViewportWScalingNV(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13050,7 +13199,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return static_cast<Result>( d.vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) );
   }
 #else
   template <typename Dispatch>
@@ -13074,7 +13223,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, static_cast<VkDisplayKHR>( display ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13102,7 +13251,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+    return static_cast<Result>(
+      d.vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13154,8 +13304,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
-      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+      static_cast<VkSurfaceKHR>( surface ),
+      reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13186,8 +13337,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
+    return static_cast<Result>( d.vkDisplayPowerControlEXT(
+      static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13216,7 +13367,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
+    return static_cast<Result>( d.vkRegisterDeviceEventEXT( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkFence *>( pFence ) ) );
@@ -13279,7 +13430,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
+    return static_cast<Result>( d.vkRegisterDisplayEventEXT( static_cast<VkDevice>( m_device ),
       static_cast<VkDisplayKHR>( display ),
       reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
@@ -13346,8 +13497,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+    return static_cast<Result>( d.vkGetSwapchainCounterEXT(
+      static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13378,8 +13529,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
-      m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ),
+      static_cast<VkSwapchainKHR>( swapchain ),
+      reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13409,7 +13561,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
+    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
       static_cast<VkSwapchainKHR>( swapchain ),
       pPresentationTimingCount,
       reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
@@ -13505,7 +13657,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+    d.vkCmdSetDiscardRectangleEXT(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13529,7 +13682,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
+    d.vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) );
   }

   template <typename Dispatch>
@@ -13537,7 +13690,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
+    d.vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
   }

   //=== VK_EXT_hdr_metadata ===
@@ -13549,8 +13702,10 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkSetHdrMetadataEXT(
-      m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+    d.vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
+      swapchainCount,
+      reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
+      reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13588,7 +13743,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
@@ -13649,8 +13804,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdBeginRenderPass2KHR(
-      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+    d.vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+      reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13675,8 +13831,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdNextSubpass2KHR(
-      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+    d.vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+      reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13700,7 +13857,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+    d.vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13725,7 +13882,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return static_cast<Result>( d.vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) );
   }
 #else
   template <typename Dispatch>
@@ -13755,7 +13912,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
       reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
       reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
   }
@@ -13789,8 +13946,8 @@ namespace VULKAN_HPP_NAMESPACE
       const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13818,7 +13975,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
     return static_cast<Result>(
-      d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+      d.vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13848,7 +14005,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+    return static_cast<Result>(
+      d.vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13875,7 +14033,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+    return static_cast<Result>( d.vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13909,7 +14067,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
     return static_cast<Result>(
-      d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
+      d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        queueFamilyIndex,
        pCounterCount,
        reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
@@ -14025,8 +14183,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
-      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+      reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
+      pNumPasses );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14053,7 +14212,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+    return static_cast<Result>(
+      d.vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14078,7 +14238,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkReleaseProfilingLockKHR( m_device );
+    d.vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
   }

   //=== VK_KHR_get_surface_capabilities2 ===
@@ -14090,7 +14250,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
       reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
       reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
   }
@@ -14145,7 +14305,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
       reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
       pSurfaceFormatCount,
       reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
@@ -14334,8 +14494,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14415,8 +14575,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+      static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14498,8 +14658,10 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
-      m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
+    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+      static_cast<VkDisplayKHR>( display ),
+      pPropertyCount,
+      reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14583,7 +14745,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
       reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
       reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
   }
@@ -14619,7 +14781,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
+    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( static_cast<VkInstance>( m_instance ),
       reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -14685,7 +14847,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
+    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( m_instance ),
       reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -14748,7 +14910,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+    return static_cast<Result>(
+      d.vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14774,7 +14937,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+    return static_cast<Result>(
+      d.vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14800,7 +14964,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+    d.vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14821,7 +14985,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+    d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
   }

   template <typename Dispatch>
@@ -14829,7 +14993,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+    d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14851,7 +15015,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+    d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14872,7 +15036,7 @@ namespace VULKAN_HPP_NAMESPACE
   VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+    d.vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
   }

   template <typename Dispatch>
@@ -14880,7 +15044,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+    d.vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14905,7 +15069,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
       reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
@@ -14966,8 +15130,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDebugUtilsMessengerEXT(
-      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14994,8 +15159,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyDebugUtilsMessengerEXT(
-      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15023,7 +15189,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkSubmitDebugUtilsMessageEXT( m_instance,
+    d.vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ),
       static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
       static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
       reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
@@ -15058,8 +15224,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+      static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15109,8 +15275,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>(
-      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
+      static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15147,7 +15313,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
+    return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( m_device ),
       static_cast<VkPipelineCache>( pipelineCache ),
       createInfoCount,
       reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
@@ -15350,8 +15516,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
-      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
+    return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast<VkDevice>( m_device ),
+      static_cast<VkPipeline>( executionGraph ),
+      reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15382,8 +15549,10 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
-      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
+    return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
+      static_cast<VkPipeline>( executionGraph ),
+      reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ),
+      pNodeIndex ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15406,25 +15575,35 @@ namespace VULKAN_HPP_NAMESPACE
 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+  VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+                                                                          VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                                          VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
+    d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      static_cast<VkPipeline>( executionGraph ),
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ) );
   }

   template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                           VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
                                                            const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+    d.vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ),
+      reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                           VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
                                                            const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
@@ -15433,23 +15612,30 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
 #  endif

-    d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+    d.vkCmdDispatchGraphAMDX( m_commandBuffer,
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ),
+      reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
   }
 #  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

   template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                                   VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
                                                                    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDispatchGraphIndirectAMDX(
-      m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+    d.vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ),
+      reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
   }

 #  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                                   VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
                                                                    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
@@ -15458,18 +15644,24 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
 #  endif

-    d.vkCmdDispatchGraphIndirectAMDX(
-      m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+    d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer,
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ),
+      reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
   }
 #  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

   template <typename Dispatch>
   VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                                        VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
                                                                         VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+    d.vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      static_cast<VkDeviceAddress>( scratch ),
+      static_cast<VkDeviceSize>( scratchSize ),
+      static_cast<VkDeviceAddress>( countInfo ) );
   }
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
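// [editorial note, not part of the commit] Net effect of the VK_AMDX_shader_enqueue
// hunks above: scratch memory is now described by an address plus an explicit size,
// and initialization additionally names the execution graph pipeline. A hedged sketch
// of the resulting call sequence; the variable setup is illustrative only:
//
//   vk::Pipeline      executionGraph = ...;   // execution graph pipeline
//   vk::DeviceAddress scratch        = ...;   // device address of the scratch buffer
//   vk::DeviceSize    scratchSize    = ...;   // queried via getExecutionGraphPipelineScratchSizeAMDX
//   cmd.initializeGraphScratchMemoryAMDX( executionGraph, scratch, scratchSize );
//   vk::DispatchGraphCountInfoAMDX countInfo = ...;
//   cmd.dispatchGraphAMDX( scratch, scratchSize, &countInfo );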
@@ -15480,7 +15672,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+    d.vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15503,8 +15695,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
-      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+      static_cast<VkSampleCountFlagBits>( samples ),
+      reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15534,8 +15727,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetImageMemoryRequirements2KHR(
-      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+    d.vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
+      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15581,8 +15775,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetBufferMemoryRequirements2KHR(
-      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+    d.vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
+      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15629,7 +15824,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
+    d.vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
       pSparseMemoryRequirementCount,
       reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
@@ -15710,7 +15905,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
+    return static_cast<Result>( d.vkCreateAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
       reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
@@ -15772,8 +15967,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyAccelerationStructureKHR(
-      m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15800,8 +15996,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyAccelerationStructureKHR(
-      m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15830,7 +16027,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
+    d.vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
       infoCount,
       reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
       reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
@@ -15872,7 +16069,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
+    d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
       infoCount,
       reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
       reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
@@ -15932,7 +16129,7 @@ namespace VULKAN_HPP_NAMESPACE
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
     return static_cast<Result>(
-      d.vkBuildAccelerationStructuresKHR( m_device,
+      d.vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ),
        static_cast<VkDeferredOperationKHR>( deferredOperation ),
        infoCount,
        reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
@@ -15981,8 +16178,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
-      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+    return static_cast<Result>( d.vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkDeferredOperationKHR>( deferredOperation ),
+      reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16015,8 +16213,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
-      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkDeferredOperationKHR>( deferredOperation ),
+      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16050,8 +16249,9 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
-      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+      static_cast<VkDeferredOperationKHR>( deferredOperation ),
+      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16089,7 +16289,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
+    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
       accelerationStructureCount,
       reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
       static_cast<VkQueryType>( queryType ),
@@ -16162,7 +16362,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+    d.vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16184,7 +16385,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16207,7 +16409,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+    d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16230,8 +16433,8 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<DeviceAddress>(
-      d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
+      static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16263,7 +16466,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
+    d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
       accelerationStructureCount,
       reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
       static_cast<VkQueryType>( queryType ),
@@ -16301,7 +16504,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
+    d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ),
       reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
       reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
   }
@@ -16335,7 +16538,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetAccelerationStructureBuildSizesKHR( m_device,
+    d.vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ),
       static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
       reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
       pMaxPrimitiveCounts,
@@ -16388,7 +16591,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdTraceRaysKHR( m_commandBuffer,
+    d.vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
       reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
       reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
       reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
@@ -16436,7 +16639,7 @@ namespace VULKAN_HPP_NAMESPACE
       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, @@ -16672,8 +16875,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16724,8 +16927,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16777,7 +16980,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, + d.vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), @@ -16815,15 +17018,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<DeviceSize>( - d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) ); + return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); + d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize ); } //=== VK_KHR_sampler_ycbcr_conversion === @@ -16836,7 +17039,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, + return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( 
pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); @@ -16899,8 +17102,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversionKHR( - m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16930,7 +17134,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); + return static_cast<Result>( + d.vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16958,7 +17163,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); + return static_cast<Result>( + d.vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16987,7 +17193,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( - m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); + static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17019,7 +17225,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, + return static_cast<Result>( d.vkCreateValidationCacheEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) ); @@ -17081,7 +17287,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyValidationCacheEXT( - m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17109,7 +17315,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyValidationCacheEXT( - m_device, static_cast<VkValidationCacheEXT>( validationCache 
), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17137,8 +17343,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkMergeValidationCachesEXT( - m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); + return static_cast<Result>( d.vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ), + static_cast<VkValidationCacheEXT>( dstCache ), + srcCacheCount, + reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17168,7 +17376,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); + return static_cast<Result>( + d.vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17247,7 +17456,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); + d.vkCmdBindShadingRateImageNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); } template <typename Dispatch> @@ -17258,7 +17468,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportShadingRatePaletteNV( - m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); + static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17285,7 +17495,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + d.vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); @@ -17320,7 +17530,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, + return static_cast<Result>( d.vkCreateAccelerationStructureNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) ); @@ -17382,8 +17592,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyAccelerationStructureNV( - m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureNV>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17410,8 +17621,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyAccelerationStructureNV( - m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureNV>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17439,7 +17651,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) ); } @@ -17490,8 +17702,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); + return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( + static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17524,7 +17736,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + d.vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), @@ -17571,7 +17783,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + d.vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) ); @@ -17595,7 +17807,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysNV( m_commandBuffer, + d.vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( 
raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), @@ -17621,7 +17833,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), @@ -17824,8 +18036,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17874,8 +18086,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); + return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( + static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17924,7 +18136,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, + d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ), accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), @@ -17963,7 +18175,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); + return static_cast<Result>( d.vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), shader ) ); } #else template <typename Dispatch> @@ -17991,8 +18203,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutSupportKHR( - m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); + d.vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18046,7 +18259,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + 
d.vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -18065,7 +18278,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + d.vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -18084,7 +18297,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, + return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); @@ -18124,7 +18337,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + d.vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), @@ -18139,8 +18352,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18219,8 +18432,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( - m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ), + timestampCount, + reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18298,7 +18514,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); + d.vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask ); } template <typename Dispatch> @@ -18309,7 +18525,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, 
stride ); + d.vkCmdDrawMeshTasksIndirectNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); } template <typename Dispatch> @@ -18322,7 +18539,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, + d.vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -18340,8 +18557,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetExclusiveScissorEnableNV( - m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); + d.vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissorCount, + reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18368,7 +18587,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); + d.vkCmdSetExclusiveScissorNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18393,7 +18613,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); + d.vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), pCheckpointMarker ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18415,7 +18635,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); + d.vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18476,7 +18696,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); + return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18504,7 +18724,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); + return static_cast<Result>( + 
d.vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18531,7 +18752,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); + return static_cast<Result>( d.vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18559,8 +18780,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); + return static_cast<Result>( d.vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18585,7 +18806,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUninitializePerformanceApiINTEL( m_device ); + d.vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) ); } template <typename Dispatch> @@ -18593,7 +18814,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); + return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18619,8 +18841,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); + return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18646,8 +18868,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); + return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18675,7 +18897,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, + return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) ); } @@ -18730,7 +18952,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + return static_cast<Result>( + d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); } #else template <typename Dispatch> @@ -18756,7 +18979,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + return static_cast<Result>( + d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); } #else template <typename Dispatch> @@ -18782,7 +19006,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + return static_cast<Result>( + d.vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); } #else template <typename Dispatch> @@ -18810,7 +19035,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetPerformanceParameterINTEL( - m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); + static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -18840,7 +19065,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); + d.vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); } #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -18854,7 +19079,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( m_instance ), reinterpret_cast<const 
VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -18920,7 +19145,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, + return static_cast<Result>( d.vkCreateMetalSurfaceEXT( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -18985,8 +19210,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( - m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); + return static_cast<Result>( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + pFragmentShadingRateCount, + reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19071,8 +19298,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFragmentShadingRateKHR( - m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); + d.vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkExtent2D *>( pFragmentSize ), + reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19098,7 +19326,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( pLocationInfo ) ); + d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( pLocationInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19122,7 +19351,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, + d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInputAttachmentIndexInfoKHR *>( pInputAttachmentIndexInfo ) ); } @@ -19150,7 +19379,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19178,8 +19408,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19265,7 +19495,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); + return static_cast<Result>( d.vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); } #else template <typename Dispatch> @@ -19296,7 +19526,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19381,7 +19611,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19471,7 +19701,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); @@ -19561,7 +19791,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); + return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); } # else template <typename Dispatch> @@ -19587,7 +19817,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); + return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); } # else template <typename 
Dispatch> @@ -19614,8 +19844,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( - m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); + return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), + reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19648,7 +19879,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, + return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -19710,7 +19941,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19735,7 +19967,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); + return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19760,7 +19992,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19787,7 +20020,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + d.vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } //=== VK_EXT_host_query_reset === @@ -19799,7 +20032,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + 
d.vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); } //=== VK_EXT_extended_dynamic_state === @@ -19808,14 +20041,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); + d.vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); + d.vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); } template <typename Dispatch> @@ -19823,7 +20056,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); + d.vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); } template <typename Dispatch> @@ -19832,7 +20065,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + d.vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19855,7 +20088,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + d.vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -19883,7 +20116,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + d.vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), @@ -19939,21 +20172,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); + d.vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); } template <typename Dispatch> 
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); + d.vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); + d.vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); } template <typename Dispatch> @@ -19961,14 +20194,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); + d.vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); + d.vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); } template <typename Dispatch> @@ -19980,7 +20213,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOpEXT( m_commandBuffer, + d.vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), @@ -19996,8 +20229,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDeferredOperationKHR( - m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); + return static_cast<Result>( d.vkCreateDeferredOperationKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20050,7 +20284,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDeferredOperationKHR( - m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20078,7 +20312,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
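// Editorial note -- not part of the generated diff: the VK_KHR_deferred_host_operations entry
// points in the surrounding hunks follow a create / join / query / destroy lifecycle. A hedged
// usage sketch against the C API (assumes 'device' is a valid VkDevice with the extension
// enabled; error handling trimmed to the essentials):
#include <vulkan/vulkan.h>

inline void runDeferredWork( VkDevice device )
{
  VkDeferredOperationKHR op = VK_NULL_HANDLE;
  if ( vkCreateDeferredOperationKHR( device, nullptr, &op ) != VK_SUCCESS )
    return;

  // ... pass 'op' to a deferrable command here, e.g. vkBuildAccelerationStructuresKHR ...

  uint32_t maxThreads = vkGetDeferredOperationMaxConcurrencyKHR( device, op );
  (void)maxThreads;                                  // a real app would spawn this many joiners; this sketch joins once

  VkResult join;
  do
  {
    join = vkDeferredOperationJoinKHR( device, op ); // donate this thread's CPU time to the operation
  } while ( join == VK_THREAD_IDLE_KHR );            // idle: work remains but is not currently assignable

  if ( join == VK_SUCCESS )                          // operation complete: query the result and clean up
  {
    VkResult opResult = vkGetDeferredOperationResultKHR( device, op );   // result of the deferred command itself
    (void)opResult;
    vkDestroyDeferredOperationKHR( device, op, nullptr );
  }
  // VK_THREAD_DONE_KHR would mean other joiners still hold work; the operation
  // must not be destroyed until it has actually completed.
}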
d.vkDestroyDeferredOperationKHR( - m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20104,7 +20338,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ); + return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ); } #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20113,7 +20347,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); + return static_cast<Result>( d.vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); } #else template <typename Dispatch> @@ -20138,7 +20372,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); + return static_cast<Result>( d.vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); } #else template <typename Dispatch> @@ -20170,7 +20404,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, + return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) ); @@ -20267,7 +20501,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, + return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) ); @@ -20365,7 +20599,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, + d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) ); @@ -20463,7 +20697,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCopyMemoryToImageEXT( 
m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) ); + return static_cast<Result>( + d.vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20489,7 +20724,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) ); + return static_cast<Result>( + d.vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20515,7 +20751,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) ); + return static_cast<Result>( + d.vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20542,8 +20779,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) ); + return static_cast<Result>( d.vkTransitionImageLayoutEXT( + static_cast<VkDevice>( m_device ), transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20572,7 +20809,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2EXT( m_device, + d.vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); @@ -20629,7 +20866,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) ); + return static_cast<Result>( + d.vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20656,7 +20894,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) ); + return static_cast<Result>( d.vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20684,7 +20922,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); + return static_cast<Result>( + d.vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20713,7 +20952,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } @@ -20764,7 +21003,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20787,8 +21027,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteGeneratedCommandsNV( - m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBool32>( isPreprocessed ), + reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20814,7 +21055,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); + d.vkCmdBindPipelineShaderGroupNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); } template <typename Dispatch> @@ -20825,7 +21067,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, + return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); @@ -20887,8 +21129,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), + 
static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20915,8 +21158,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20944,7 +21188,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); + d.vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20970,7 +21214,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); + return static_cast<Result>( d.vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) ); } #else template <typename Dispatch> @@ -20997,7 +21241,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); + return static_cast<Result>( + d.vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21048,7 +21293,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, + return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); @@ -21109,7 +21354,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyPrivateDataSlotEXT( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21139,8 +21385,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - 
d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + return static_cast<Result>( d.vkSetPrivateDataEXT( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); } #else template <typename Dispatch> @@ -21171,7 +21417,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); + d.vkGetPrivateDataEXT( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21203,7 +21450,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ), reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) ); } @@ -21264,7 +21511,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetEncodedVideoSessionParametersKHR( m_device, + d.vkGetEncodedVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ), reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ), pDataSize, @@ -21451,7 +21698,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); + d.vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21478,7 +21725,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateCudaModuleNV( m_device, + return static_cast<Result>( d.vkCreateCudaModuleNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCudaModuleNV *>( pModule ) ) ); @@ -21540,7 +21787,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) ); + return static_cast<Result>( d.vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21616,7 +21863,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
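// A hedged usage sketch for the VK_EXT_private_data wrappers above (enhanced mode): attaching
// a 64-bit payload to an image and reading it back. `device`, `image` and `slot` are assumed
// to have been created by the application; the payload value 42 is arbitrary.
#include <vulkan/vulkan.hpp>
void tagImage( vk::Device device, vk::Image image, vk::PrivateDataSlot slot )
{
  uint64_t handle = uint64_t( static_cast<VkImage>( image ) );   // object handle as uint64_t
  device.setPrivateDataEXT( vk::ObjectType::eImage, handle, slot, 42 );  // throws on failure
  uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eImage, handle, slot );
  (void)value;  // 42
}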
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device, + return static_cast<Result>( d.vkCreateCudaFunctionNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) ); @@ -21677,7 +21924,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCudaModuleNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21703,7 +21951,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCudaModuleNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21729,7 +21978,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCudaFunctionNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21755,7 +22005,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyCudaFunctionNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21780,7 +22031,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); + d.vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21806,7 +22057,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); + d.vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21851,7 +22102,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + d.vkCmdSetEvent2KHR( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21875,7 +22127,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); + d.vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); } template <typename Dispatch> @@ -21885,8 +22137,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents2KHR( - m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); + d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + eventCount, + reinterpret_cast<const VkEvent *>( pEvents ), + reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21920,7 +22174,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21944,7 +22198,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); + d.vkCmdWriteTimestamp2KHR( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); } template <typename Dispatch> @@ -21955,7 +22210,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); + d.vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21984,8 +22239,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarker2AMD( - m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker ); + d.vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineStageFlags2>( stage ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + marker ); } template <typename Dispatch> @@ -21994,7 +22252,7 
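// A usage sketch for the VK_KHR_synchronization2 wrappers touched above (enhanced mode):
// recording one image layout transition with CommandBuffer::pipelineBarrier2KHR. `cmd` and
// `image` are assumed to come from the application.
#include <vulkan/vulkan.hpp>
void recordBarrier( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier2 barrier{};
  barrier.srcStageMask     = vk::PipelineStageFlagBits2::eColorAttachmentOutput;
  barrier.srcAccessMask    = vk::AccessFlagBits2::eColorAttachmentWrite;
  barrier.dstStageMask     = vk::PipelineStageFlagBits2::eFragmentShader;
  barrier.dstAccessMask    = vk::AccessFlagBits2::eShaderRead;
  barrier.oldLayout        = vk::ImageLayout::eColorAttachmentOptimal;
  barrier.newLayout        = vk::ImageLayout::eShaderReadOnlyOptimal;
  barrier.image            = image;
  barrier.subresourceRange = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };

  vk::DependencyInfo dep{};
  dep.imageMemoryBarrierCount = 1;
  dep.pImageMemoryBarriers    = &barrier;
  cmd.pipelineBarrier2KHR( dep );  // forwards to vkCmdPipelineBarrier2KHR as shown in the diff
}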
@@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); + d.vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22055,7 +22313,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); + d.vkGetDescriptorSetLayoutSizeEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22082,7 +22341,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22110,7 +22370,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor ); + d.vkGetDescriptorEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22151,7 +22411,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); + d.vkCmdBindDescriptorBuffersEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22179,7 +22440,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, @@ -22228,7 +22489,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( - m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); } template <typename Dispatch> @@ -22236,8 +22497,8 @@ namespace VULKAN_HPP_NAMESPACE 
const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + return static_cast<Result>( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22265,8 +22526,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + return static_cast<Result>( d.vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22294,8 +22555,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + return static_cast<Result>( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22323,8 +22584,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + return static_cast<Result>( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22353,7 +22614,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22385,8 +22646,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFragmentShadingRateEnumNV( - m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); + d.vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( 
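// A sketch of the VK_EXT_descriptor_buffer queries wrapped above (enhanced mode): the layout
// size and per-binding offset are what an application needs to place descriptors inside a
// descriptor buffer. `device` and `layout` are assumed to exist; binding 0 is an arbitrary
// example.
#include <vulkan/vulkan.hpp>
void queryLayout( vk::Device device, vk::DescriptorSetLayout layout )
{
  vk::DeviceSize sizeInBytes = device.getDescriptorSetLayoutSizeEXT( layout );
  vk::DeviceSize offset      = device.getDescriptorSetLayoutBindingOffsetEXT( layout, 0 );
  (void)sizeInBytes;
  (void)offset;
}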
m_commandBuffer ), + static_cast<VkFragmentShadingRateNV>( shadingRate ), + reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); } //=== VK_EXT_mesh_shader === @@ -22396,7 +22658,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template <typename Dispatch> @@ -22407,7 +22669,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + d.vkCmdDrawMeshTasksIndirectEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); } template <typename Dispatch> @@ -22420,7 +22683,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, + d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), @@ -22436,7 +22699,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); + d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22458,7 +22721,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); + d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22480,7 +22743,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); + d.vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22502,7 +22766,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); + d.vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); } #ifndef 
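// A usage sketch for the VK_EXT_mesh_shader wrappers above: dispatching an 8x8 grid of
// task/mesh workgroups. Assumes a mesh-shading pipeline is already bound on `cmd`.
#include <vulkan/vulkan.hpp>
void drawMeshGrid( vk::CommandBuffer cmd )
{
  cmd.drawMeshTasksEXT( 8, 8, 1 );  // groupCountX, groupCountY, groupCountZ
}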
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22524,7 +22789,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); + d.vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22546,7 +22811,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); + d.vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22572,7 +22837,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); + static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); } #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -22583,7 +22848,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); + return static_cast<Result>( d.vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) ); } # else template <typename Dispatch> @@ -22609,7 +22874,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); + return static_cast<Result>( + d.vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22662,7 +22928,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, + return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -22723,7 +22989,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dfb ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22754,7 
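// A usage sketch for the VK_KHR_copy_commands2 wrappers above (copyBuffer2KHR and friends),
// assuming `cmd`, `src` and `dst` come from the application; a 256-byte copy is an arbitrary
// example.
#include <vulkan/vulkan.hpp>
void copyRegion( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst )
{
  vk::BufferCopy2 region{};
  region.srcOffset = 0;
  region.dstOffset = 0;
  region.size      = 256;

  vk::CopyBufferInfo2 info{};
  info.srcBuffer   = src;
  info.dstBuffer   = dst;
  info.regionCount = 1;
  info.pRegions    = &region;
  cmd.copyBuffer2KHR( info );  // forwards to vkCmdCopyBuffer2KHR
}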
+23021,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetVertexInputEXT( m_commandBuffer, + d.vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexBindingDescriptionCount, reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), vertexAttributeDescriptionCount, @@ -22791,8 +23058,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); + return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22823,7 +23090,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), zirconHandle, reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) ); @@ -22864,7 +23131,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( - m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22893,8 +23160,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); + return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22928,7 +23195,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, + return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) ); @@ -22990,8 +23257,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( - m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( 
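// A sketch for the vkCmdSetVertexInputEXT wrapper shown above: declaring one vertex binding
// with a single vec4 attribute at runtime. The stride and format are arbitrary example values.
#include <vulkan/vulkan.hpp>
void setVertexLayout( vk::CommandBuffer cmd )
{
  vk::VertexInputBindingDescription2EXT binding{};
  binding.binding   = 0;
  binding.stride    = 16;
  binding.inputRate = vk::VertexInputRate::eVertex;
  binding.divisor   = 1;

  vk::VertexInputAttributeDescription2EXT attr{};
  attr.location = 0;
  attr.binding  = 0;
  attr.format   = vk::Format::eR32G32B32A32Sfloat;
  attr.offset   = 0;

  cmd.setVertexInputEXT( 1, &binding, 1, &attr );
}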
pImageConstraintsInfo ) ) ); + return static_cast<Result>( + d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23022,8 +23291,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( - m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); + return static_cast<Result>( + d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23054,7 +23325,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23082,7 +23353,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23110,8 +23381,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( - m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23144,7 +23416,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( - m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); + static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23171,7 +23443,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSubpassShadingHUAWEI( m_commandBuffer ); + d.vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) ); } //=== 
VK_HUAWEI_invocation_mask === @@ -23182,7 +23454,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); + d.vkCmdBindInvocationMaskHUAWEI( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); } //=== VK_NV_external_memory_rdma === @@ -23194,8 +23467,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( - m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); + return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), + reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23225,8 +23499,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPipelinePropertiesEXT( - m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); + return static_cast<Result>( d.vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), + reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23254,7 +23529,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); + d.vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints ); } template <typename Dispatch> @@ -23262,21 +23537,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); + d.vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); + d.vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) ); + 
d.vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) ); } template <typename Dispatch> @@ -23284,7 +23559,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); + d.vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); } #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -23297,7 +23572,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, + return static_cast<Result>( d.vkCreateScreenSurfaceQNX( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); @@ -23358,7 +23633,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, window ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23387,7 +23663,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); + d.vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23411,7 +23687,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); + d.vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); } //=== VK_EXT_multi_draw === @@ -23425,7 +23701,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride ); + d.vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + drawCount, + reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), + instanceCount, + firstInstance, + stride ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23459,8 +23740,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiIndexedEXT( - m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); + d.vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( 
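// A sketch for the VK_EXT_multi_draw wrapper above: issuing two non-indexed draws with one
// call. `cmd` is assumed to have a graphics pipeline bound; the vertex ranges are arbitrary.
#include <vulkan/vulkan.hpp>
void drawTwoRanges( vk::CommandBuffer cmd )
{
  vk::MultiDrawInfoEXT draws[2];
  draws[0].firstVertex = 0;
  draws[0].vertexCount = 3;
  draws[1].firstVertex = 3;
  draws[1].vertexCount = 6;
  cmd.drawMultiEXT( 2, draws, /*instanceCount*/ 1, /*firstInstance*/ 0, sizeof( vk::MultiDrawInfoEXT ) );
}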
m_commandBuffer ), + drawCount, + reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23496,7 +23782,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateMicromapEXT( m_device, + return static_cast<Result>( d.vkCreateMicromapEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) ); @@ -23557,7 +23843,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyMicromapEXT( + static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23583,7 +23870,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyMicromapEXT( + static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23609,7 +23897,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); + d.vkCmdBuildMicromapsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23633,8 +23921,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBuildMicromapsEXT( - m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); + return static_cast<Result>( d.vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + infoCount, + reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23666,8 +23956,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); + return static_cast<Result>( d.vkCopyMicromapEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23698,8 +23988,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( - m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); + return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23729,8 +24020,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( - m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); + return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23764,8 +24056,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( - m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) ); + return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ), + micromapCount, + reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), + static_cast<VkQueryType>( queryType ), + dataSize, + pData, + stride ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23828,7 +24125,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); + d.vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23849,7 +24146,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); + d.vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23871,7 +24168,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); + d.vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23897,7 +24194,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, + d.vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), @@ -23934,7 +24231,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceMicromapCompatibilityEXT( m_device, + d.vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); } @@ -23965,7 +24262,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetMicromapBuildSizesEXT( m_device, + d.vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) ); @@ -24000,7 +24297,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template <typename Dispatch> @@ -24009,7 +24306,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); + d.vkCmdDrawClusterIndirectHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); } //=== VK_EXT_pageable_device_local_memory === @@ -24018,7 +24315,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority ); + d.vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), priority ); } //=== VK_KHR_maintenance4 === @@ -24029,8 +24326,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceBufferMemoryRequirementsKHR( - m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24076,8 +24374,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
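// A hedged sketch for the VK_KHR_maintenance4 wrapper above (enhanced mode): querying memory
// requirements for a buffer before it is created. The size and usage are arbitrary example
// values.
#include <vulkan/vulkan.hpp>
void queryBufferRequirements( vk::Device device )
{
  vk::BufferCreateInfo bufferCI{};
  bufferCI.size  = 1024;
  bufferCI.usage = vk::BufferUsageFlagBits::eStorageBuffer;

  vk::DeviceBufferMemoryRequirements info{};
  info.pCreateInfo = &bufferCI;

  vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirementsKHR( info );
  (void)reqs;
}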
d.vkGetDeviceImageMemoryRequirementsKHR( - m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24124,7 +24423,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); @@ -24203,7 +24502,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) ); } @@ -24234,7 +24533,7 @@ namespace VULKAN_HPP_NAMESPACE Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData ); + d.vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( descriptorSet ), ppData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24264,7 +24563,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); + d.vkCmdCopyMemoryIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); } template <typename Dispatch> @@ -24277,7 +24576,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + d.vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride, @@ -24319,7 +24618,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); + d.vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + decompressRegionCount, + reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24345,8 +24646,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - d.vkCmdDecompressMemoryIndirectCountNV( - m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride ); + d.vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( indirectCommandsAddress ), + static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), + stride ); } //=== VK_NV_device_generated_commands_compute === @@ -24357,8 +24660,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPipelineIndirectMemoryRequirementsNV( - m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24406,7 +24710,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); + d.vkCmdUpdatePipelineIndirectBufferNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); } template <typename Dispatch> @@ -24415,7 +24720,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<DeviceAddress>( - d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); + d.vkGetPipelineIndirectDeviceAddressNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24441,14 +24746,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) ); + d.vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) ); + d.vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) ); } template <typename Dispatch> @@ -24456,7 +24761,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); + d.vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( 
rasterizationSamples ) ); } template <typename Dispatch> @@ -24465,7 +24770,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); + d.vkCmdSetSampleMaskEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24496,21 +24802,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) ); + d.vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) ); + d.vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) ); + d.vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) ); } template <typename Dispatch> @@ -24520,7 +24826,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); + d.vkCmdSetColorBlendEnableEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24546,8 +24853,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendEquationEXT( - m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); + d.vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24575,7 +24884,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); + d.vkCmdSetColorWriteMaskEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, 
reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24601,14 +24911,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); + d.vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); + d.vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream ); } template <typename Dispatch> @@ -24617,7 +24927,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); + d.vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); } template <typename Dispatch> @@ -24625,14 +24936,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) ); + d.vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) ); } template <typename Dispatch> @@ -24640,7 +24951,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) ); + d.vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) ); } template <typename Dispatch> @@ -24650,8 +24961,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendAdvancedEXT( - m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); + d.vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24677,7 +24990,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
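These VK_EXT_extended_dynamic_state_3 setters are thin one-liners over the corresponding vkCmdSet* entry points. A hedged recording sketch, assuming the matching extendedDynamicState3* features were enabled at device creation and the bound pipeline declared these states dynamic (all identifiers illustrative):

#include <vulkan/vulkan.hpp>

void recordDynamicState3( vk::CommandBuffer cmd )
{
  cmd.setPolygonModeEXT( vk::PolygonMode::eFill );
  cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e1 );
  cmd.setDepthClipEnableEXT( VK_TRUE );

  // per-attachment state is passed as an array starting at firstAttachment
  vk::Bool32 blendEnable = VK_FALSE;
  cmd.setColorBlendEnableEXT( 0 /*firstAttachment*/, blendEnable );
}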
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); + d.vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); } template <typename Dispatch> @@ -24685,14 +24998,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); + d.vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) ); + d.vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) ); } template <typename Dispatch> @@ -24700,7 +25013,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) ); + d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) ); } template <typename Dispatch> @@ -24708,7 +25021,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) ); + d.vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) ); } template <typename Dispatch> @@ -24718,7 +25031,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); + d.vkCmdSetViewportSwizzleNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24744,14 +25058,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) ); + d.vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) ); } template <typename Dispatch> VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation ); + 
d.vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation ); } template <typename Dispatch> @@ -24759,7 +25073,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); + d.vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); } template <typename Dispatch> @@ -24767,7 +25081,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) ); + d.vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) ); } template <typename Dispatch> @@ -24776,7 +25090,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + d.vkCmdSetCoverageModulationTableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24799,7 +25113,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) ); + d.vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) ); } template <typename Dispatch> @@ -24807,7 +25121,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) ); + d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( representativeFragmentTestEnable ) ); } template <typename Dispatch> @@ -24815,7 +25129,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); + d.vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); } //=== VK_EXT_shader_module_identifier === @@ -24826,7 +25140,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); + d.vkGetShaderModuleIdentifierEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24852,8 
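vkGetShaderModuleIdentifierEXT returns a small opaque key that can stand in for the full SPIR-V blob when keying an on-disk cache. A hedged sketch of the enhanced-mode overload, assuming VK_EXT_shader_module_identifier is enabled (identifiers illustrative):

#include <vulkan/vulkan.hpp>

vk::ShaderModuleIdentifierEXT cacheKeyFor( vk::Device device, vk::ShaderModule module )
{
  // the enhanced overload returns the identifier by value
  return device.getShaderModuleIdentifierEXT( module );
}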
+25167,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleCreateInfoIdentifierEXT( - m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); + d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24887,7 +25203,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( - d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), pFormatCount, reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); @@ -24984,7 +25300,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, + return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); @@ -25045,7 +25361,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyOpticalFlowSessionNV( + static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25072,7 +25389,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyOpticalFlowSessionNV( + static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25102,7 +25420,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device, + return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), static_cast<VkImageView>( view ), @@ -25140,8 +25458,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdOpticalFlowExecuteNV( - m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const 
VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); + d.vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkOpticalFlowSessionNV>( session ), + reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25170,7 +25489,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer2KHR( m_commandBuffer, + d.vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), @@ -25183,8 +25502,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderingAreaGranularityKHR( - m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); + d.vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), + reinterpret_cast<VkExtent2D *>( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25211,8 +25531,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSubresourceLayoutKHR( - m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); + d.vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), + reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25257,7 +25578,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2KHR( m_device, + d.vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); @@ -25312,7 +25633,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( pData ) ); + d.vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25338,7 +25659,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateShadersEXT( m_device, + return static_cast<Result>( d.vkCreateShadersEXT( static_cast<VkDevice>( m_device ), createInfoCount, reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), @@ -25525,7 +25846,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyShaderEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25551,7 +25873,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyShaderEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25576,7 +25899,7 @@ namespace VULKAN_HPP_NAMESPACE Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); + return static_cast<Result>( d.vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25652,8 +25975,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindShadersEXT( - m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) ); + d.vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + stageCount, + reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), + reinterpret_cast<const VkShaderEXT *>( pShaders ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25688,8 +26013,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClampRangeEXT( - m_commandBuffer, static_cast<VkDepthClampModeEXT>( depthClampMode ), reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) ); + d.vkCmdSetDepthClampRangeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDepthClampModeEXT>( depthClampMode ), + reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25720,7 +26046,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + return static_cast<Result>( d.vkCreatePipelineBinariesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( pBinaries ) ) ); @@ -25975,7 +26301,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyPipelineBinaryKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), 
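vkGetShaderBinaryDataEXT follows the usual size-then-data idiom; the enhanced overloads above wrap exactly this pattern. A hedged sketch against the C entry point, assuming VK_EXT_shader_object is enabled and the function pointer is available at runtime (error handling elided, names illustrative):

#include <vulkan/vulkan.h>
#include <cstdint>
#include <vector>

std::vector<uint8_t> readShaderBinary( VkDevice device, VkShaderEXT shader )
{
  size_t size = 0;
  vkGetShaderBinaryDataEXT( device, shader, &size, nullptr );      // first call: query size
  std::vector<uint8_t> data( size );
  vkGetShaderBinaryDataEXT( device, shader, &size, data.data() );  // second call: fetch bytes
  return data;
}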
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26002,7 +26329,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyPipelineBinaryKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26029,8 +26357,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPipelineKeyKHR( - m_device, reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ), reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) ); + return static_cast<Result>( d.vkGetPipelineKeyKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26062,7 +26391,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, + return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( pInfo ), reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineBinaryKey ), pPipelineBinaryDataSize, @@ -26152,8 +26481,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( - m_device, reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) ); + return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26183,8 +26513,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( - m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); + return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ), + static_cast<VkFramebuffer>( framebuffer ), + pPropertiesCount, + reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26263,8 +26595,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( - m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); + return static_cast<Result>( 
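vkGetPipelineBinaryDataKHR also uses size-then-data, and additionally returns the binary's key alongside the bytes, which is what the extra pPipelineBinaryKey parameter above is for. A hedged sketch, assuming VK_KHR_pipeline_binary is enabled and the entry point is loaded (identifiers illustrative):

#include <vulkan/vulkan.h>
#include <cstdint>
#include <vector>

VkResult readPipelineBinary( VkDevice device, VkPipelineBinaryKHR binary,
                             VkPipelineBinaryKeyKHR & key, std::vector<uint8_t> & data )
{
  VkPipelineBinaryDataInfoKHR info{ VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR };
  info.pipelineBinary = binary;
  key = { VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR };

  size_t size = 0;
  VkResult result = vkGetPipelineBinaryDataKHR( device, &info, &key, &size, nullptr );
  if ( result != VK_SUCCESS )
    return result;
  data.resize( size );
  return vkGetPipelineBinaryDataKHR( device, &info, &key, &size, data.data() );
}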
d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), + reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26293,8 +26626,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); + return static_cast<Result>( d.vkSetLatencySleepModeNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26322,8 +26655,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); + return static_cast<Result>( d.vkLatencySleepNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26347,7 +26680,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); + d.vkSetLatencyMarkerNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26371,7 +26705,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); + d.vkGetLatencyTimingsNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26425,7 +26760,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); + d.vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26450,7 +26785,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( - m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26534,7 +26869,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d 
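The VK_NV_low_latency2 wrappers above thread a swapchain handle through every call. A hedged sketch of bracketing per-frame simulation work with latency markers, assuming a swapchain created with latency support (identifiers illustrative):

#include <vulkan/vulkan.hpp>
#include <cstdint>

void markSimulation( vk::Device device, vk::SwapchainKHR swapchain, uint64_t presentId )
{
  vk::SetLatencyMarkerInfoNV marker{};
  marker.presentID = presentId;

  marker.marker = vk::LatencyMarkerNV::eSimulationStart;
  device.setLatencyMarkerNV( swapchain, marker );
  // ... run this frame's simulation work ...
  marker.marker = vk::LatencyMarkerNV::eSimulationEnd;
  device.setLatencyMarkerNV( swapchain, marker );
}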
) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageAspectFlags>( aspectMask ) ); } #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -26546,7 +26881,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); + return static_cast<Result>( + d.vkGetScreenBufferPropertiesQNX( static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26594,7 +26930,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleKHR( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + d.vkCmdSetLineStippleKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } //=== VK_KHR_calibrated_timestamps === @@ -26605,8 +26941,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( - d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); + return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26685,8 +27021,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( - m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ), + timestampCount, + reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26765,7 +27104,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) ); + d.vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26787,7 +27127,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) ); + d.vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer 
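vkGetCalibratedTimestampsKHR samples several clocks as close to simultaneously as the implementation can manage and reports the worst-case skew. A hedged sketch using the enhanced overload, assuming an exceptions-enabled vulkan.hpp configuration and that both time domains were reported by vkGetPhysicalDeviceCalibrateableTimeDomainsKHR (identifiers illustrative):

#include <vulkan/vulkan.hpp>
#include <array>
#include <cstdint>

uint64_t gpuCpuSkew( vk::Device device )
{
  std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eDevice },
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eClockMonotonic },
  };
  // returns the sampled timestamps plus the maximum deviation in nanoseconds
  auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );
  (void)timestamps;
  return maxDeviation;
}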
), reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26809,7 +27149,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) ); + d.vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26832,7 +27173,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) ); } @@ -26858,7 +27199,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); + d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26883,7 +27225,8 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( - m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); + static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26911,7 +27254,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); } @@ -26963,8 +27306,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPreprocessGeneratedCommandsEXT( - m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ), static_cast<VkCommandBuffer>( stateCommandBuffer ) ); + d.vkCmdPreprocessGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ), + static_cast<VkCommandBuffer>( stateCommandBuffer ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26989,8 +27333,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteGeneratedCommandsEXT( - m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const 
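With VK_KHR_maintenance6 the bind and push parameters move into extensible structs, which is why each wrapper above forwards a single pointer. A hedged sketch for the push-constants variant (identifiers illustrative):

#include <vulkan/vulkan.hpp>

void pushValue( vk::CommandBuffer cmd, vk::PipelineLayout layout, float value )
{
  vk::PushConstantsInfoKHR info{};
  info.layout     = layout;
  info.stageFlags = vk::ShaderStageFlagBits::eVertex;
  info.offset     = 0;
  info.size       = sizeof( value );
  info.pValues    = &value;
  cmd.pushConstants2KHR( info );
}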
VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBool32>( isPreprocessed ), + reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27017,7 +27362,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( m_device, + return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutEXT *>( pIndirectCommandsLayout ) ) ); @@ -27079,8 +27424,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutEXT( - m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27107,8 +27453,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutEXT( - m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27137,7 +27484,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( m_device, + return static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkIndirectExecutionSetEXT *>( pIndirectExecutionSet ) ) ); @@ -27199,8 +27546,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectExecutionSetEXT( - m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27227,8 +27575,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectExecutionSetEXT( - m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), 
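For VK_EXT_device_generated_commands, explicit preprocessing is optional and takes the state from a separate command buffer. A hedged sketch of the two calls above, with the setup of vk::GeneratedCommandsInfoEXT, the indirect commands layout, and the preprocess buffer elided (identifiers illustrative):

#include <vulkan/vulkan.hpp>

void replayGenerated( vk::CommandBuffer cmd, vk::CommandBuffer stateCmd,
                      const vk::GeneratedCommandsInfoEXT & info )
{
  cmd.preprocessGeneratedCommandsEXT( info, stateCmd );  // optional explicit preprocess
  cmd.executeGeneratedCommandsEXT( VK_TRUE /*isPreprocessed*/, info );
}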
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27256,7 +27605,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, + d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast<VkDevice>( m_device ), static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), executionSetWriteCount, reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( pExecutionSetWrites ) ); @@ -27289,7 +27638,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateIndirectExecutionSetShaderEXT( m_device, + d.vkUpdateIndirectExecutionSetShaderEXT( static_cast<VkDevice>( m_device ), static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), executionSetWriteCount, reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( pExecutionSetWrites ) ); diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp index 5965ef4..068656a 100644 --- a/include/vulkan/vulkan_handles.hpp +++ b/include/vulkan/vulkan_handles.hpp @@ -6078,33 +6078,40 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template 
<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp index e6195fa..c025d6c 100644 --- a/include/vulkan/vulkan_hash.hpp +++ b/include/vulkan/vulkan_hash.hpp @@ -4626,7 +4626,9 @@ namespace std std::size_t seed = 0; VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.minSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.maxSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sizeGranularity ); return seed; } }; @@ -11865,6 +11867,7 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderMeshEnqueue ); return seed; } }; @@ -11885,6 +11888,11 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroups ); return seed; } }; diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp index 905d884..9b06159 100644 --- a/include/vulkan/vulkan_raii.hpp +++ b/include/vulkan/vulkan_raii.hpp @@ -5947,15 +5947,20 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT; + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -18376,43 +18381,55 @@ namespace VULKAN_HPP_NAMESPACE return nodeIndex; } - 
VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && "Function <vkCmdInitializeGraphScratchMemoryAMDX> requires <VK_AMDX_shader_enqueue>" ); - getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ) ); + getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipeline>( executionGraph ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" ); getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" ); getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && "Function <vkCmdDispatchGraphIndirectCountAMDX> requires <VK_AMDX_shader_enqueue>" ); - getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( - static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) ); + getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + static_cast<VkDeviceAddress>( countInfo ) ); } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -21566,8 +21583,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); + return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( static_cast<VkDevice>( 
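The signature change above is the functional heart of this update: graph scratch memory now carries an explicit size, and initialization names the execution-graph pipeline it belongs to. A hedged recording sketch, assuming VK_ENABLE_BETA_EXTENSIONS is defined and the device supports shaderEnqueue (identifiers illustrative):

#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
void dispatchExecutionGraph( vk::CommandBuffer cmd, vk::Pipeline graph,
                             vk::DeviceAddress scratch, vk::DeviceSize scratchSize,
                             const vk::DispatchGraphCountInfoAMDX & countInfo )
{
  cmd.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, graph );
  // once per scratch allocation, before the first dispatch that uses it
  cmd.initializeGraphScratchMemoryAMDX( graph, scratch, scratchSize );
  cmd.dispatchGraphAMDX( scratch, scratchSize, countInfo );
}
#endif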
m_device ), + reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), + reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp index 033025c..a9d7102 100644 --- a/include/vulkan/vulkan_structs.hpp +++ b/include/vulkan/vulkan_structs.hpp @@ -34505,9 +34505,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } - , size{ size_ } + , minSize{ minSize_ } + , maxSize{ maxSize_ } + , sizeGranularity{ sizeGranularity_ } { } @@ -34534,9 +34539,21 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + minSize = minSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT + { + maxSize = maxSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT + { + sizeGranularity = sizeGranularity_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ @@ -34555,11 +34572,15 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &> + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, + void * const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, size ); + return std::tie( sType, pNext, minSize, maxSize, sizeGranularity ); } # endif @@ -34571,7 +34592,8 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSize == rhs.minSize ) && ( maxSize == rhs.maxSize ) && + ( sizeGranularity == rhs.sizeGranularity ); # endif } @@ -34582,9 +34604,11 @@ namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity = 
{}; }; template <> @@ -39792,7 +39816,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, uint32_t maxSequenceCount_ = {}, uint32_t maxDrawCount_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indirectExecutionSet{ indirectExecutionSet_ } , indirectCommandsLayout{ indirectCommandsLayout_ } @@ -39818,7 +39842,7 @@ namespace VULKAN_HPP_NAMESPACE } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39866,7 +39890,7 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, - void * const &, + const void * const &, VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const &, uint32_t const &, @@ -39899,7 +39923,7 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; - void * pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; uint32_t maxSequenceCount = {}; @@ -87116,10 +87140,12 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderEnqueue{ shaderEnqueue_ } + , shaderMeshEnqueue{ shaderMeshEnqueue_ } { } @@ -87151,6 +87177,13 @@ namespace VULKAN_HPP_NAMESPACE shaderEnqueue = shaderEnqueue_; return *this; } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & + setShaderMeshEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderMeshEnqueue = shaderMeshEnqueue_; + return *this; + } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT @@ -87167,11 +87200,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderEnqueue ); + return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue ); } # endif @@ -87183,7 +87216,7 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue 
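The minSize/maxSize/sizeGranularity trio above replaces the single size field of ExecutionGraphPipelineScratchSizeAMDX. A plausible reading, and this interpretation is an assumption from the field names rather than from spec text, is that valid scratch sizes are minSize plus whole multiples of sizeGranularity, capped at maxSize:

#include <vulkan/vulkan.hpp>
#include <algorithm>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
vk::DeviceSize clampScratchSize( const vk::ExecutionGraphPipelineScratchSizeAMDX & s,
                                 vk::DeviceSize preferred )
{
  if ( preferred <= s.minSize || s.sizeGranularity == 0 )
    return s.minSize;
  // round the preferred size down onto the granularity grid, then cap it
  vk::DeviceSize size = s.minSize + ( preferred - s.minSize ) / s.sizeGranularity * s.sizeGranularity;
  return std::min( size, s.maxSize );
}
#endif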
) && ( shaderMeshEnqueue == rhs.shaderMeshEnqueue ); # endif } @@ -87194,9 +87227,10 @@ namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue = {}; }; template <> @@ -87215,22 +87249,26 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, - uint32_t maxExecutionGraphShaderOutputNodes_ = {}, - uint32_t maxExecutionGraphShaderPayloadSize_ = {}, - uint32_t maxExecutionGraphShaderPayloadCount_ = {}, - uint32_t executionGraphDispatchAddressAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + std::array<uint32_t, 3> const & maxExecutionGraphWorkgroupCount_ = {}, + uint32_t maxExecutionGraphWorkgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxExecutionGraphDepth{ maxExecutionGraphDepth_ } , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ } , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ } , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ } , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ } + , maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ } + , maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs ) ) @@ -87286,6 +87324,20 @@ namespace VULKAN_HPP_NAMESPACE executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; return *this; } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroupCount( std::array<uint32_t, 3> maxExecutionGraphWorkgroupCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroups( uint32_t maxExecutionGraphWorkgroups_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroups = maxExecutionGraphWorkgroups_; + return *this; + } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT @@ 
-87308,6 +87360,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t const &, uint32_t const &, uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -87318,7 +87372,9 @@ namespace VULKAN_HPP_NAMESPACE maxExecutionGraphShaderOutputNodes, maxExecutionGraphShaderPayloadSize, maxExecutionGraphShaderPayloadCount, - executionGraphDispatchAddressAlignment ); + executionGraphDispatchAddressAlignment, + maxExecutionGraphWorkgroupCount, + maxExecutionGraphWorkgroups ); } # endif @@ -87334,7 +87390,8 @@ namespace VULKAN_HPP_NAMESPACE ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && - ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ); + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ) && + ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount ) && ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups ); # endif } @@ -87345,13 +87402,15 @@ namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; - void * pNext = {}; - uint32_t maxExecutionGraphDepth = {}; - uint32_t maxExecutionGraphShaderOutputNodes = {}; - uint32_t maxExecutionGraphShaderPayloadSize = {}; - uint32_t maxExecutionGraphShaderPayloadCount = {}; - uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxExecutionGraphWorkgroupCount = {}; + uint32_t maxExecutionGraphWorkgroups = {}; }; template <> diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index bac535f..7034d36 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ b/include/vulkan/vulkan_to_string.hpp @@ -3385,6 +3385,10 @@ namespace VULKAN_HPP_NAMESPACE result += "AllowDerivatives | "; if ( value & PipelineCreateFlagBits2KHR::eDerivative ) result += "Derivative | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2KHR::eExecutionGraphAMDX ) + result += "ExecutionGraphAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | "; if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) @@ -8919,6 +8923,9 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2KHR::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return 
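The two structure changes above add the shaderMeshEnqueue feature bit and the maxExecutionGraphWorkgroupCount / maxExecutionGraphWorkgroups limits. Both can be read back through the usual features2/properties2 chains; a minimal sketch against the updated headers (a valid vk::PhysicalDevice and a build with VK_ENABLE_BETA_EXTENSIONS are assumed, since every AMDX symbol is beta-guarded):

    #define VK_ENABLE_BETA_EXTENSIONS  // AMDX types are only visible behind this guard
    #include <vulkan/vulkan.hpp>

    void queryShaderEnqueue( vk::PhysicalDevice physicalDevice )
    {
      // Chain the AMDX feature struct behind the core features2 query.
      auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                  vk::PhysicalDeviceShaderEnqueueFeaturesAMDX>();
      vk::Bool32 meshEnqueue = features.get<vk::PhysicalDeviceShaderEnqueueFeaturesAMDX>().shaderMeshEnqueue;

      // Same pattern for the new limits, including the ArrayWrapper1D<uint32_t, 3> member.
      auto properties = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                      vk::PhysicalDeviceShaderEnqueuePropertiesAMDX>();
      auto const & limits = properties.get<vk::PhysicalDeviceShaderEnqueuePropertiesAMDX>();
      uint32_t maxWorkgroupsX = limits.maxExecutionGraphWorkgroupCount[0];
      (void)meshEnqueue;
      (void)maxWorkgroupsX;
    }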
"ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; diff --git a/registry/validusage.json b/registry/validusage.json index 431baa6..d4cf45e 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.297", - "comment": "from git branch: github-main commit: 4f8b03b46334ca0abb992b3d7e38ea90f4f68a23", - "date": "2024-10-04 12:37:52Z" + "api version": "1.3.298", + "comment": "from git branch: github-main commit: 05d5444cce55e340e07ead87d552407da5ea8771", + "date": "2024-10-11 07:51:42Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -15821,6 +15821,31 @@ "page": "vkspec" }, { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-shaderMeshEnqueue-10187", + "text": "If <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> is not enabled, shaders specified by <code>pStages</code> <strong class=\"purple\">must</strong> not declare the <code>ShaderEnqueueAMDX</code> capability", + "page": "vkspec" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10188", + "text": "If <code>flags</code> does not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>, shaders specified by <code>pStages</code> <strong class=\"purple\">must</strong> not declare the <code>ShaderEnqueueAMDX</code> capability", + "page": "vkspec" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-10189", + "text": "If any shader stages in <code>pStages</code> declare the <code>ShaderEnqueueAMDX</code> capability, <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> and <code>VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR</code> <strong class=\"purple\">must</strong> be included in <code>flags</code>", + "page": "vkspec" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10190", + "text": "If <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> is included in <code>flags</code>, and the pipeline requires <a href=\"#pipelines-graphics-subsets-pre-rasterization\">pre-rasterization shader state</a>, there <strong class=\"purple\">must</strong> not be a task or vertex shader specified in <code>pStages</code>", + "page": "vkspec" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10191", + "text": "If <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> is included in <code>flags</code>, all elements of <a href=\"#VkPipelineLibraryCreateInfoKHR\">VkPipelineLibraryCreateInfoKHR</a>::<code>pLibraries</code> <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code>", + "page": "vkspec" + }, + { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03372", "text": "<code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR</code>", "page": "vkspec" @@ -16377,12 +16402,12 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-graphicsPipelineLibrary-06606", - "text": "If the <a href=\"#features-graphicsPipelineLibrary\"><code>graphicsPipelineLibrary</code></a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>", + "text": "If the <a href=\"#features-graphicsPipelineLibrary\"><code>graphicsPipelineLibrary</code></a> feature is not enabled, and if the <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include 
diff --git a/registry/validusage.json b/registry/validusage.json
index 431baa6..d4cf45e 100644
--- a/registry/validusage.json
+++ b/registry/validusage.json
@@ -1,9 +1,9 @@
 {
   "version info": {
     "schema version": 2,
-    "api version": "1.3.297",
-    "comment": "from git branch: github-main commit: 4f8b03b46334ca0abb992b3d7e38ea90f4f68a23",
-    "date": "2024-10-04 12:37:52Z"
+    "api version": "1.3.298",
+    "comment": "from git branch: github-main commit: 05d5444cce55e340e07ead87d552407da5ea8771",
+    "date": "2024-10-11 07:51:42Z"
   },
   "validation": {
     "vkGetInstanceProcAddr": {
@@ -15821,6 +15821,31 @@
           "page": "vkspec"
         },
         {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-shaderMeshEnqueue-10187",
+          "text": "If <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> is not enabled, shaders specified by <code>pStages</code> <strong class=\"purple\">must</strong> not declare the <code>ShaderEnqueueAMDX</code> capability",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10188",
+          "text": "If <code>flags</code> does not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>, shaders specified by <code>pStages</code> <strong class=\"purple\">must</strong> not declare the <code>ShaderEnqueueAMDX</code> capability",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-10189",
+          "text": "If any shader stages in <code>pStages</code> declare the <code>ShaderEnqueueAMDX</code> capability, <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> and <code>VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR</code> <strong class=\"purple\">must</strong> be included in <code>flags</code>",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10190",
+          "text": "If <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> is included in <code>flags</code>, and the pipeline requires <a href=\"#pipelines-graphics-subsets-pre-rasterization\">pre-rasterization shader state</a>, there <strong class=\"purple\">must</strong> not be a task or vertex shader specified in <code>pStages</code>",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-10191",
+          "text": "If <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> is included in <code>flags</code>, all elements of <a href=\"#VkPipelineLibraryCreateInfoKHR\">VkPipelineLibraryCreateInfoKHR</a>::<code>pLibraries</code> <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code>",
+          "page": "vkspec"
+        },
+        {
           "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03372",
           "text": "<code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR</code>",
           "page": "vkspec"
@@ -16377,12 +16402,12 @@
         },
         {
           "vuid": "VUID-VkGraphicsPipelineCreateInfo-graphicsPipelineLibrary-06606",
-          "text": "If the <a href=\"#features-graphicsPipelineLibrary\"><code>graphicsPipelineLibrary</code></a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>",
+          "text": "If the <a href=\"#features-graphicsPipelineLibrary\"><code>graphicsPipelineLibrary</code></a> feature is not enabled, and if the <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>",
           "page": "vkspec"
         },
         {
           "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-06608",
-          "text": "If the pipeline defines, or includes as libraries, all the state subsets required for a <a href=\"#pipelines-graphics-subsets-complete\">complete graphics pipeline</a>, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>",
+          "text": "If the <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> feature is not enabled, and the pipeline is being created with <a href=\"#pipelines-graphics-subsets-complete\">all possible state subsets</a>, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>",
           "page": "vkspec"
         },
         {
@@ -77604,7 +77629,7 @@
         },
         {
           "vuid": "VUID-VkIndirectCommandsLayoutTokenEXT-pPushConstant-parameter",
-          "text": "If <code>type</code> is <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT</code>, the <code>pPushConstant</code> member of <code>data</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkIndirectCommandsPushConstantTokenEXT\">VkIndirectCommandsPushConstantTokenEXT</a> structure",
+          "text": "If <code>type</code> is <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT</code> or <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT</code>, the <code>pPushConstant</code> member of <code>data</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkIndirectCommandsPushConstantTokenEXT\">VkIndirectCommandsPushConstantTokenEXT</a> structure",
           "page": "vkspec"
         },
         {
@@ -97098,7 +97123,22 @@
         },
         {
           "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-09133",
-          "text": "If <code>pLibraryInfo</code> is not <code>NULL</code>, each element of <code>pLibraryInfo->libraries</code> <strong class=\"purple\">must</strong> be either a compute pipeline or an execution graph pipeline",
+          "text": "If <code>pLibraryInfo</code> is not <code>NULL</code>, each element of <code>pLibraryInfo->pLibraries</code> <strong class=\"purple\">must</strong> be either a compute pipeline, an execution graph pipeline, or a graphics pipeline",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-10181",
+          "text": "If <code>pLibraryInfo</code> is not <code>NULL</code>, each element of <code>pLibraryInfo->pLibraries</code> that is a compute pipeline or a graphics pipeline <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX</code> set",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-shaderMeshEnqueue-10182",
+          "text": "If the <a href=\"#features-shaderMeshEnqueue\"><code>shaderMeshEnqueue</code></a> feature is not enabled, and <code>pLibraryInfo->pLibraries</code> is not <code>NULL</code>, <code>pLibraryInfo->pLibraries</code> <strong class=\"purple\">must</strong> not contain any graphics pipelines",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-10183",
+          "text": "Any element of <code>pLibraryInfo->pLibraries</code> identifying a graphics pipeline <strong class=\"purple\">must</strong> have been created with <a href=\"#pipelines-graphics-subsets-complete\">all possible state subsets</a>",
           "page": "vkspec"
         },
         {
@@ -97132,6 +97172,11 @@
           "page": "vkspec"
         },
         {
+          "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-10184",
+          "text": "If <code>flags</code> does not include <code>VK_PIPELINE_CREATE_LIBRARY_BIT_KHR</code>, and an output payload declared in any shader in the pipeline does not have a <code>PayloadNodeSparseArrayAMDX</code> decoration, there <strong class=\"purple\">must</strong> be a node in the graph corresponding to every index from 0 to its <code>PayloadNodeArraySizeAMDX</code> decoration",
+          "page": "vkspec"
+        },
+        {
           "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-sType-sType",
           "text": "<code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX</code>",
           "page": "vkspec"
@@ -97262,8 +97307,13 @@
     "vkCmdInitializeGraphScratchMemoryAMDX": {
       "core": [
         {
-          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-scratch-09143",
-          "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as the value of <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code> returned by <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline",
+          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-scratch-10185",
+          "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as <code>scratchSize</code>",
+          "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-scratchSize-10186",
+          "text": "<code>scratchSize</code> <strong class=\"purple\">must</strong> be greater than or equal to <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>minSize</code> returned by <a href=\"#vkGetExecutionGraphPipelineScratchSizeAMDX\">vkGetExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline",
           "page": "vkspec"
         },
         {
@@ -97277,6 +97327,11 @@
           "page": "vkspec"
         },
         {
+          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-executionGraph-parameter",
+          "text": "<code>executionGraph</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle",
+          "page": "vkspec"
+        },
+        {
           "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-commandBuffer-recording",
           "text": "<code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>",
           "page": "vkspec"
@@ -97287,11 +97342,6 @@
           "page": "vkspec"
         },
         {
-          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-renderpass",
-          "text": "This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance",
-          "page": "vkspec"
-        },
-        {
           "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-videocoding",
           "text": "This command <strong class=\"purple\">must</strong> only be called outside of a video coding scope",
           "page": "vkspec"
@@ -97300,6 +97350,11 @@
           "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-bufferlevel",
           "text": "<code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>",
           "page": "vkspec"
+        },
+        {
+          "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-commonparent",
+          "text": "Both of <code>commandBuffer</code>, and <code>executionGraph</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>",
+          "page": "vkspec"
         }
       ]
     },
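These VU replacements track the new command signature: the pipeline and the scratch size are now passed explicitly, and any size of at least VkExecutionGraphPipelineScratchSizeAMDX::minSize is accepted instead of one exact size. A sketch of the updated initialization flow, assuming valid device, commandBuffer, executionGraphPipeline and scratchAddress values, a scratch buffer created with VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, and a dispatcher with the beta entry points loaded:

    // Query the scratch range the pipeline accepts, then initialize it with the
    // new explicit executionGraph / scratchSize parameters (spec version 2).
    vk::ExecutionGraphPipelineScratchSizeAMDX sizeInfo =
        device.getExecutionGraphPipelineScratchSizeAMDX( executionGraphPipeline );

    vk::DeviceSize scratchSize = sizeInfo.minSize;  // anything >= minSize is now valid

    commandBuffer.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, executionGraphPipeline );
    commandBuffer.initializeGraphScratchMemoryAMDX( executionGraphPipeline, scratchAddress, scratchSize );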
"VUID-vkCmdDispatchGraphAMDX-scratch-09183", - "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as the value of <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code> returned by <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-10192", + "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as <code>scratchSize</code>", + "page": "vkspec" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratchSize-10193", + "text": "<code>scratchSize</code> <strong class=\"purple\">must</strong> be greater than or equal to <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>minSize</code> returned by <a href=\"#vkGetExecutionGraphPipelineScratchSizeAMDX\">vkGetExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", "page": "vkspec" }, { @@ -97671,8 +97731,8 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-09185", - "text": "Device memory in the range [<code>scratch</code>,<code>scratch</code><br> <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code>) <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-10194", + "text": "The device memory range [<code>scratch</code>,<code>scratch</code><br> <code>scratchSize</code>] <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", "page": "vkspec" }, { @@ -97691,6 +97751,11 @@ "page": "vkspec" }, { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-10195", + "text": "If the currently bound execution graph pipeline includes draw nodes, this command <strong class=\"purple\">must</strong> be called within a render pass instance that is compatible with the graphics pipeline used to create each of those nodes", + "page": "vkspec" + }, + { "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-09145", "text": "<code>pCountInfo->infos</code> <strong class=\"purple\">must</strong> be a host pointer to a memory allocation at least as large as the product of <code>count</code> and <code>stride</code>", "page": "vkspec" @@ -97736,11 +97801,6 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphAMDX-renderpass", - "text": "This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance", - "page": "vkspec" - }, - { "vuid": "VUID-vkCmdDispatchGraphAMDX-videocoding", "text": "This command <strong class=\"purple\">must</strong> only be called outside of a video coding scope", "page": "vkspec" @@ -98110,8 +98170,13 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-09183", - "text": "<code>scratch</code> <strong class=\"purple\">must</strong> 
be the device address of an allocated memory range at least as large as the value of <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code> returned by <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-10192", + "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as <code>scratchSize</code>", + "page": "vkspec" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratchSize-10193", + "text": "<code>scratchSize</code> <strong class=\"purple\">must</strong> be greater than or equal to <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>minSize</code> returned by <a href=\"#vkGetExecutionGraphPipelineScratchSizeAMDX\">vkGetExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", "page": "vkspec" }, { @@ -98120,8 +98185,8 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-09185", - "text": "Device memory in the range [<code>scratch</code>,<code>scratch</code><br> <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code>) <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-10194", + "text": "The device memory range [<code>scratch</code>,<code>scratch</code><br> <code>scratchSize</code>] <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", "page": "vkspec" }, { @@ -98140,6 +98205,11 @@ "page": "vkspec" }, { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-10195", + "text": "If the currently bound execution graph pipeline includes draw nodes, this command <strong class=\"purple\">must</strong> be called within a render pass instance that is compatible with the graphics pipeline used to create each of those nodes", + "page": "vkspec" + }, + { "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09150", "text": "<code>pCountInfo->infos</code> <strong class=\"purple\">must</strong> be a device pointer to a memory allocation at least as large as the product of <code>count</code> and <code>stride</code> when this command is executed on the device", "page": "vkspec" @@ -98205,11 +98275,6 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-renderpass", - "text": "This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance", - "page": "vkspec" - }, - { "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-videocoding", "text": "This command <strong class=\"purple\">must</strong> only be called outside of a video coding scope", "page": "vkspec" @@ -98579,8 +98644,13 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-09183", - "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be 
the device address of an allocated memory range at least as large as the value of <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code> returned by <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-10192", + "text": "<code>scratch</code> <strong class=\"purple\">must</strong> be the device address of an allocated memory range at least as large as <code>scratchSize</code>", + "page": "vkspec" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratchSize-10193", + "text": "<code>scratchSize</code> <strong class=\"purple\">must</strong> be greater than or equal to <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>minSize</code> returned by <a href=\"#vkGetExecutionGraphPipelineScratchSizeAMDX\">vkGetExecutionGraphPipelineScratchSizeAMDX</a> for the currently bound execution graph pipeline", "page": "vkspec" }, { @@ -98589,8 +98659,8 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-09185", - "text": "Device memory in the range [<code>scratch</code>,<code>scratch</code><br> <a href=\"#VkExecutionGraphPipelineScratchSizeAMDX\">VkExecutionGraphPipelineScratchSizeAMDX</a>::<code>size</code>) <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-10194", + "text": "The device memory range [<code>scratch</code>,<code>scratch</code><br> <code>scratchSize</code>] <strong class=\"purple\">must</strong> have been initialized with <a href=\"#vkCmdInitializeGraphScratchMemoryAMDX\">vkCmdInitializeGraphScratchMemoryAMDX</a> using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command", "page": "vkspec" }, { @@ -98609,6 +98679,11 @@ "page": "vkspec" }, { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-10195", + "text": "If the currently bound execution graph pipeline includes draw nodes, this command <strong class=\"purple\">must</strong> be called within a render pass instance that is compatible with the graphics pipeline used to create each of those nodes", + "page": "vkspec" + }, + { "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09159", "text": "<code>countInfo</code> <strong class=\"purple\">must</strong> be a device pointer to a memory allocation containing a valid <a href=\"#VkDispatchGraphCountInfoAMDX\">VkDispatchGraphCountInfoAMDX</a> structure when this command is executed on the device", "page": "vkspec" @@ -98684,11 +98759,6 @@ "page": "vkspec" }, { - "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-renderpass", - "text": "This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance", - "page": "vkspec" - }, - { "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-videocoding", "text": "This command <strong class=\"purple\">must</strong> only be called outside of a video coding scope", "page": "vkspec" @@ -104997,12 +105067,12 @@ }, { "vuid": "VUID-RuntimeSpirv-ShaderEnqueueAMDX-09191", - "text": "The <code>ShaderEnqueueAMDX</code> 
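With the blanket render-pass prohibition dropped in favour of the draw-node compatibility rule above, recording a dispatch looks as follows. This is a sketch only: nodeIndex is assumed to come from vkGetExecutionGraphPipelineNodeIndexAMDX, payloadData is assumed to match the node's declared payload layout, and the scratch range is the one initialized earlier:

    // Enqueue one node dispatch from host-resident payload data. scratch and
    // scratchSize must describe the same initialized range checked by the new
    // 10192/10193/10194 VUs above.
    vk::DispatchGraphInfoAMDX info{};
    info.nodeIndex            = nodeIndex;
    info.payloadCount         = 1;
    info.payloads.hostAddress = &payloadData;
    info.payloadStride        = sizeof( payloadData );

    vk::DispatchGraphCountInfoAMDX countInfo{};
    countInfo.count             = 1;
    countInfo.infos.hostAddress = &info;
    countInfo.stride            = sizeof( info );

    commandBuffer.dispatchGraphAMDX( scratchAddress, scratchSize, countInfo );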
@@ -104997,12 +105067,12 @@
         },
         {
           "vuid": "VUID-RuntimeSpirv-ShaderEnqueueAMDX-09191",
-          "text": "The <code>ShaderEnqueueAMDX</code> capability <strong class=\"purple\">must</strong> only be used in shaders with the <code>GLCompute</code> execution model",
+          "text": "The <code>ShaderEnqueueAMDX</code> capability <strong class=\"purple\">must</strong> only be used in shaders with the <code>GLCompute</code> or <code>MeshEXT</code> execution model",
           "page": "vkspec"
         },
         {
           "vuid": "VUID-RuntimeSpirv-NodePayloadAMDX-09192",
-          "text": "Variables in the <code>NodePayloadAMDX</code> storage class <strong class=\"purple\">must</strong> only be declared in the <code>GLCompute</code> execution model",
+          "text": "Variables in the <code>NodePayloadAMDX</code> storage class <strong class=\"purple\">must</strong> only be declared in the <code>GLCompute</code> or <code>MeshEXT</code> execution model",
           "page": "vkspec"
         },
         {
diff --git a/registry/vk.xml b/registry/vk.xml
index 6c2246c..be068ae 100644
--- a/registry/vk.xml
+++ b/registry/vk.xml
@@ -175,7 +175,7 @@ branch of the member gitlab server.
 #define <name>VKSC_API_VERSION_1_0</name> <type>VK_MAKE_API_VERSION</type>(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0</type>
         <type api="vulkan" category="define">// Version of this file
-#define <name>VK_HEADER_VERSION</name> 297</type>
+#define <name>VK_HEADER_VERSION</name> 298</type>
         <type api="vulkan" category="define" requires="VK_HEADER_VERSION">// Complete version of this file
 #define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 3, VK_HEADER_VERSION)</type>
         <type api="vulkansc" category="define">// Version of this file
@@ -6410,10 +6410,10 @@ typedef void* <name>MTLSharedEvent_id</name>;
             <member limittype="max"><type>uint32_t</type> <name>maxIndirectCommandsTokenCount</name></member>
             <member limittype="max"><type>uint32_t</type> <name>maxIndirectCommandsTokenOffset</name></member>
             <member limittype="max"><type>uint32_t</type> <name>maxIndirectCommandsIndirectStride</name></member>
-            <member><type>VkIndirectCommandsInputModeFlagsEXT</type> <name>supportedIndirectCommandsInputModes</name></member>
-            <member><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStages</name></member>
-            <member><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStagesPipelineBinding</name></member>
-            <member><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStagesShaderBinding</name></member>
+            <member limittype="bitmask"><type>VkIndirectCommandsInputModeFlagsEXT</type> <name>supportedIndirectCommandsInputModes</name></member>
+            <member limittype="bitmask"><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStages</name></member>
+            <member limittype="bitmask"><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStagesPipelineBinding</name></member>
+            <member limittype="bitmask"><type>VkShaderStageFlags</type> <name>supportedIndirectCommandsShaderStagesShaderBinding</name></member>
             <member><type>VkBool32</type> <name>deviceGeneratedCommandsTransformFeedback</name></member>
             <member><type>VkBool32</type> <name>deviceGeneratedCommandsMultiDrawIndirectCount</name></member>
         </type>
@@ -6430,7 +6430,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
         </type>
         <type category="struct" name="VkGeneratedCommandsMemoryRequirementsInfoEXT">
             <member values="VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
-            <member optional="true"><type>void</type>* <name>pNext</name></member>
+            <member optional="true">const <type>void</type>* <name>pNext</name></member>
             <member optional="true"><type>VkIndirectExecutionSetEXT</type> <name>indirectExecutionSet</name></member>
             <member><type>VkIndirectCommandsLayoutEXT</type> <name>indirectCommandsLayout</name></member>
             <member><type>uint32_t</type> <name>maxSequenceCount</name></member>
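The pNext constness fix above (mirrored in the C++ struct earlier in this commit) only changes how the structure is filled in, not the call shape. A small sketch, assuming valid handles from an application using VK_EXT_device_generated_commands:

    // pNext is now `const void *`, so chaining a const extension struct no
    // longer needs a const_cast; nullptr shown here for simplicity.
    vk::GeneratedCommandsMemoryRequirementsInfoEXT reqInfo{};
    reqInfo.pNext                  = nullptr;  // const void * as of this change
    reqInfo.indirectExecutionSet   = indirectExecutionSet;
    reqInfo.indirectCommandsLayout = indirectCommandsLayout;
    reqInfo.maxSequenceCount       = maxSequences;

    vk::MemoryRequirements2 memReqs = device.getGeneratedCommandsMemoryRequirementsEXT( reqInfo );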
@@ -6540,7 +6540,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
             <member><type>VkShaderStageFlags</type> <name>shaderStages</name></member>
         </type>
         <type category="union" name="VkIndirectCommandsTokenDataEXT">
-            <member selection="VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT">const <type>VkIndirectCommandsPushConstantTokenEXT</type>* <name>pPushConstant</name></member>
+            <member selection="VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT,VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT">const <type>VkIndirectCommandsPushConstantTokenEXT</type>* <name>pPushConstant</name></member>
             <member selection="VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT">const <type>VkIndirectCommandsVertexBufferTokenEXT</type>* <name>pVertexBuffer</name></member>
             <member selection="VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT">const <type>VkIndirectCommandsIndexBufferTokenEXT</type>* <name>pIndexBuffer</name></member>
             <member selection="VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT">const <type>VkIndirectCommandsExecutionSetTokenEXT</type>* <name>pExecutionSet</name></member>
@@ -8994,11 +8994,14 @@ typedef void* <name>MTLSharedEvent_id</name>;
             <member limittype="max"><type>uint32_t</type> <name>maxExecutionGraphShaderPayloadSize</name></member>
             <member limittype="max"><type>uint32_t</type> <name>maxExecutionGraphShaderPayloadCount</name></member>
             <member limittype="noauto"><type>uint32_t</type> <name>executionGraphDispatchAddressAlignment</name></member>
+            <member limittype="max"><type>uint32_t</type> <name>maxExecutionGraphWorkgroupCount</name>[3]</member>
+            <member limittype="max"><type>uint32_t</type> <name>maxExecutionGraphWorkgroups</name></member>
         </type>
         <type category="struct" name="VkPhysicalDeviceShaderEnqueueFeaturesAMDX" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
             <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX"><type>VkStructureType</type> <name>sType</name></member>
             <member noautovalidity="true" optional="true"><type>void</type>* <name>pNext</name></member>
             <member><type>VkBool32</type> <name>shaderEnqueue</name></member>
+            <member><type>VkBool32</type> <name>shaderMeshEnqueue</name></member>
         </type>
         <type category="struct" name="VkExecutionGraphPipelineCreateInfoAMDX">
             <member values="VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX"><type>VkStructureType</type> <name>sType</name></member>
@@ -9020,7 +9023,9 @@ typedef void* <name>MTLSharedEvent_id</name>;
         <type category="struct" name="VkExecutionGraphPipelineScratchSizeAMDX">
             <member values="VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX"><type>VkStructureType</type> <name>sType</name></member>
             <member noautovalidity="true" optional="true"><type>void</type>* <name>pNext</name></member>
-            <member><type>VkDeviceSize</type> <name>size</name></member>
+            <member><type>VkDeviceSize</type> <name>minSize</name></member>
+            <member><type>VkDeviceSize</type> <name>maxSize</name></member>
+            <member><type>VkDeviceSize</type> <name>sizeGranularity</name></member>
         </type>
         <type category="struct" name="VkDispatchGraphInfoAMDX">
             <member><type>uint32_t</type> <name>nodeIndex</name></member>
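Splitting the old single size member into minSize / maxSize / sizeGranularity means valid scratch sizes now form a stepped range rather than one value. The helper below is a sketch of one plausible reading of those fields (minSize plus a whole number of sizeGranularity steps, capped at maxSize); the normative wording lives in the spec text, not in this diff:

    #include <algorithm>

    // Smallest plausibly-valid scratch size that is >= wanted, under the assumed
    // model validSize = minSize + n * sizeGranularity, capped at maxSize.
    vk::DeviceSize chooseScratchSize( vk::ExecutionGraphPipelineScratchSizeAMDX const & s,
                                      vk::DeviceSize wanted )
    {
      if ( wanted <= s.minSize )
        return s.minSize;
      vk::DeviceSize steps = ( wanted - s.minSize + s.sizeGranularity - 1 ) / s.sizeGranularity;
      return std::min( s.minSize + steps * s.sizeGranularity, s.maxSize );
    }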
@@ -15718,13 +15723,13 @@
             <proto><type>VkResult</type> <name>vkGetExecutionGraphPipelineScratchSizeAMDX</name></proto>
             <param><type>VkDevice</type> <name>device</name></param>
             <param><type>VkPipeline</type> <name>executionGraph</name></param>
-            <param><type>VkExecutionGraphPipelineScratchSizeAMDX</type>* <name>pSizeInfo</name></param>
+            <param><type>VkExecutionGraphPipelineScratchSizeAMDX</type>* <name>pSizeInfo</name></param>
         </command>
         <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
             <proto><type>VkResult</type> <name>vkGetExecutionGraphPipelineNodeIndexAMDX</name></proto>
             <param><type>VkDevice</type> <name>device</name></param>
             <param><type>VkPipeline</type> <name>executionGraph</name></param>
-            <param>const <type>VkPipelineShaderStageNodeCreateInfoAMDX</type>* <name>pNodeInfo</name></param>
+            <param>const <type>VkPipelineShaderStageNodeCreateInfoAMDX</type>* <name>pNodeInfo</name></param>
             <param><type>uint32_t</type>* <name>pNodeIndex</name></param>
         </command>
         <command successcodes="VK_SUCCESS,VK_PIPELINE_COMPILE_REQUIRED_EXT" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
@@ -15736,27 +15741,32 @@ typedef void* <name>MTLSharedEvent_id</name>;
             <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
             <param len="createInfoCount"><type>VkPipeline</type>* <name>pPipelines</name></param>
         </command>
-        <command queues="graphics,compute" tasks="action" renderpass="outside" cmdbufferlevel="primary">
+        <command queues="graphics,compute" tasks="action" renderpass="both" cmdbufferlevel="primary">
             <proto><type>void</type> <name>vkCmdInitializeGraphScratchMemoryAMDX</name></proto>
             <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipeline</type> <name>executionGraph</name></param>
             <param><type>VkDeviceAddress</type> <name>scratch</name></param>
+            <param><type>VkDeviceSize</type> <name>scratchSize</name></param>
         </command>
-        <command queues="graphics,compute" tasks="action" renderpass="outside" cmdbufferlevel="primary">
+        <command queues="graphics,compute" tasks="action" renderpass="both" cmdbufferlevel="primary">
             <proto><type>void</type> <name>vkCmdDispatchGraphAMDX</name></proto>
             <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkDeviceAddress</type> <name>scratch</name></param>
-            <param>const <type>VkDispatchGraphCountInfoAMDX</type>* <name>pCountInfo</name></param>
+            <param><type>VkDeviceSize</type> <name>scratchSize</name></param>
+            <param>const <type>VkDispatchGraphCountInfoAMDX</type>* <name>pCountInfo</name></param>
         </command>
-        <command queues="graphics,compute" tasks="action" renderpass="outside" cmdbufferlevel="primary">
+        <command queues="graphics,compute" tasks="action" renderpass="both" cmdbufferlevel="primary">
             <proto><type>void</type> <name>vkCmdDispatchGraphIndirectAMDX</name></proto>
             <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkDeviceAddress</type> <name>scratch</name></param>
-            <param>const <type>VkDispatchGraphCountInfoAMDX</type>* <name>pCountInfo</name></param>
+            <param><type>VkDeviceSize</type> <name>scratchSize</name></param>
+            <param>const <type>VkDispatchGraphCountInfoAMDX</type>* <name>pCountInfo</name></param>
         </command>
-        <command queues="graphics,compute" tasks="action" renderpass="outside" cmdbufferlevel="primary">
+        <command queues="graphics,compute" tasks="action" renderpass="both" cmdbufferlevel="primary">
             <proto><type>void</type> <name>vkCmdDispatchGraphIndirectCountAMDX</name></proto>
             <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
             <param><type>VkDeviceAddress</type> <name>scratch</name></param>
+            <param><type>VkDeviceSize</type> <name>scratchSize</name></param>
             <param><type>VkDeviceAddress</type> <name>countInfo</name></param>
         </command>
         <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" tasks="state">
@@ -19265,10 +19275,10 @@ typedef void* <name>MTLSharedEvent_id</name>;
            <enum value="&quot;VK_AMD_extension_134&quot;" name="VK_AMD_EXTENSION_134_EXTENSION_NAME"/>
        </require>
    </extension>
-   <extension name="VK_AMDX_shader_enqueue" number="135" author="AMD" depends="(((VK_KHR_get_physical_device_properties2,VK_VERSION_1_1)+VK_KHR_synchronization2),VK_VERSION_1_3)+VK_KHR_pipeline_library+VK_KHR_spirv_1_4" type="device" contact="Tobias Hector @tobski" provisional="true" platform="provisional" supported="vulkan">
+   <extension name="VK_AMDX_shader_enqueue" number="135" author="AMD" depends="((VK_KHR_synchronization2+VK_KHR_spirv_1_4+VK_EXT_extended_dynamic_state),VK_VERSION_1_3)+VK_KHR_maintenance5+VK_KHR_pipeline_library" type="device" contact="Tobias Hector @tobski" provisional="true" platform="provisional" supported="vulkan">
        <require>
-           <enum value="1" name="VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION"/>
-           <enum value="&quot;VK_AMDX_shader_enqueue&quot;" name="VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME"/>
+           <enum value="2" name="VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION"/>
+           <enum value="&quot;VK_AMDX_shader_enqueue&quot;" name="VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME"/>
            <enum name="VK_SHADER_INDEX_UNUSED_AMDX"/>
            <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX" protect="VK_ENABLE_BETA_EXTENSIONS"/>
            <enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX" protect="VK_ENABLE_BETA_EXTENSIONS"/>
@@ -19295,7 +19305,11 @@ typedef void* <name>MTLSharedEvent_id</name>;
            <feature name="shaderEnqueue" struct="VkPhysicalDeviceShaderEnqueueFeaturesAMDX"/>
        </require>
        <require depends="VK_KHR_maintenance5">
-           <enum bitpos="25" extends="VkBufferUsageFlagBits2KHR" name="VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"/>
+           <enum bitpos="25" extends="VkBufferUsageFlagBits2KHR" name="VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX" protect="VK_ENABLE_BETA_EXTENSIONS"/>
+           <enum bitpos="32" extends="VkPipelineCreateFlagBits2KHR" name="VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX" protect="VK_ENABLE_BETA_EXTENSIONS"/>
+       </require>
+       <require depends="VK_EXT_mesh_shader">
+           <feature name="shaderMeshEnqueue" struct="VkPhysicalDeviceShaderEnqueueFeaturesAMDX"/>
        </require>
    </extension>
    <extension name="VK_KHR_extension_136" number="136" type="device" depends="VK_KHR_maintenance5" author="KHR" contact="Tobias Hector @tobski" supported="disabled">
@@ -24074,7 +24088,6 @@ typedef void* <name>MTLSharedEvent_id</name>;
        <require>
            <enum value="0" name="VK_AMD_EXTENSION_479_SPEC_VERSION"/>
            <enum value="&quot;VK_AMD_extension_479&quot;" name="VK_AMD_EXTENSION_479_EXTENSION_NAME"/>
-           <enum bitpos="32" extends="VkPipelineCreateFlagBits2KHR" name="VK_PIPELINE_CREATE_2_RESERVED_32_BIT_KHR"/>
        </require>
    </extension>
    <extension name="VK_EXT_extension_480" number="480" author="EXT" contact="Daniel Stone" supported="disabled">
@@ -25424,6 +25437,18 @@ typedef void* <name>MTLSharedEvent_id</name>;
            <enum value="&quot;VK_KHR_extension_606&quot;" name="VK_KHR_EXTENSION_606_EXTENSION_NAME"/>
        </require>
    </extension>
+   <extension name="VK_KHR_extension_607" number="607" author="ARM" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="disabled">
+       <require>
+           <enum value="0" name="VK_KHR_EXTENSION_607_SPEC_VERSION"/>
+           <enum value="&quot;VK_KHR_extension_607&quot;" name="VK_KHR_EXTENSION_607_EXTENSION_NAME"/>
+       </require>
+   </extension>
+   <extension name="VK_KHR_extension_608" number="608" author="ARM" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="disabled">
+       <require>
+           <enum value="0" name="VK_KHR_EXTENSION_608_SPEC_VERSION"/>
+           <enum value="&quot;VK_KHR_extension_608&quot;" name="VK_KHR_EXTENSION_608_EXTENSION_NAME"/>
+       </require>
+   </extension>
 </extensions>
 <formats>
    <format name="VK_FORMAT_R4G4_UNORM_PACK8" class="8-bit" blockSize="1" texelsPerBlock="1" packed="8">
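Pulling the registry changes together (the new feature bit, its VK_EXT_mesh_shader-gated require block, and the reworked extension dependencies), enabling mesh-node enqueue at device creation would look roughly like this sketch; queue setup, feature querying and the mesh-shader feature structs themselves are omitted:

    // Chain the AMDX features into device creation; shaderMeshEnqueue additionally
    // needs VK_EXT_mesh_shader per the require block above, and the extension now
    // depends on VK_KHR_maintenance5 and VK_KHR_pipeline_library.
    vk::PhysicalDeviceShaderEnqueueFeaturesAMDX enqueueFeatures{};
    enqueueFeatures.shaderEnqueue     = VK_TRUE;
    enqueueFeatures.shaderMeshEnqueue = VK_TRUE;

    std::vector<char const *> extensions = { VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME,
                                             VK_EXT_MESH_SHADER_EXTENSION_NAME,
                                             VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME,
                                             VK_KHR_MAINTENANCE_5_EXTENSION_NAME };

    vk::DeviceCreateInfo createInfo{};
    createInfo.pNext                   = &enqueueFeatures;  // structextends="VkDeviceCreateInfo"
    createInfo.enabledExtensionCount   = static_cast<uint32_t>( extensions.size() );
    createInfo.ppEnabledExtensionNames = extensions.data();
    // ...queue create infos etc. omitted...
    vk::Device device = physicalDevice.createDevice( createInfo );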