Diffstat (limited to 'include/vulkan/vulkan_funcs.hpp')
-rw-r--r--  include/vulkan/vulkan_funcs.hpp  2153
1 file changed, 1251 insertions(+), 902 deletions(-)
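
The whole diff applies one mechanical pattern: every dispatch call now converts the handle member (m_instance, m_physicalDevice, m_device, m_queue, m_commandBuffer) to its underlying C handle with an explicit static_cast instead of relying on an implicit conversion. A minimal before/after sketch of that pattern, using a trimmed-down hypothetical wrapper rather than the actual vulkan.hpp class:

#include <vulkan/vulkan.h>

// Hypothetical wrapper for illustration only; the real vulkan.hpp classes carry
// dispatchers, enhanced-mode overloads, and many more members.
class ExampleDevice
{
public:
  explicit ExampleDevice( VkDevice device ) : m_device( device ) {}

  VkResult waitIdle( PFN_vkDeviceWaitIdle pfnDeviceWaitIdle ) const
  {
    // before: pfnDeviceWaitIdle( m_device );  // relied on the member converting on its own
    // after:  the cast to the C handle type is spelled out explicitly, as in this diff
    return pfnDeviceWaitIdle( static_cast<VkDevice>( m_device ) );
  }

private:
  VkDevice m_device = VK_NULL_HANDLE;
};
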
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index 95ffd72..b07c66b 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -79,7 +79,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -102,7 +102,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+ return static_cast<Result>(
+ d.vkEnumeratePhysicalDevices( static_cast<VkInstance>( m_instance ), pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -175,7 +176,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+ d.vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -201,7 +202,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -231,7 +233,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkImageTiling>( tiling ),
@@ -275,7 +277,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+ d.vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -302,7 +304,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyProperties(
- m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -363,7 +365,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -387,7 +390,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetInstanceProcAddr( m_instance, pName );
+ return d.vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), pName );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -409,7 +412,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceProcAddr( m_device, pName );
+ return d.vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), pName );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -434,7 +437,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
+ return static_cast<Result>( d.vkCreateDevice( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDevice *>( pDevice ) ) );
@@ -491,7 +494,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -597,8 +600,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+ return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -753,7 +756,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+ return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -827,7 +831,7 @@ namespace VULKAN_HPP_NAMESPACE
Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+ d.vkGetDeviceQueue( static_cast<VkDevice>( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -854,7 +858,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>(
+ d.vkQueueSubmit( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -880,7 +885,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+ return static_cast<Result>( d.vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) );
}
#else
template <typename Dispatch>
@@ -903,7 +908,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ return static_cast<Result>( d.vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) );
}
#else
template <typename Dispatch>
@@ -928,7 +933,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAllocateMemory( m_device,
+ return static_cast<Result>( d.vkAllocateMemory( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
@@ -989,7 +994,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1015,7 +1020,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1044,7 +1049,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkMapMemory( m_device,
+ return static_cast<Result>( d.vkMapMemory( static_cast<VkDevice>( m_device ),
static_cast<VkDeviceMemory>( memory ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkDeviceSize>( size ),
@@ -1082,7 +1087,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+ d.vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ) );
}
template <typename Dispatch>
@@ -1091,7 +1096,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ return static_cast<Result>(
+ d.vkFlushMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1120,7 +1126,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ d.vkInvalidateMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1148,7 +1154,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+ d.vkGetDeviceMemoryCommitment(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1176,8 +1183,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return static_cast<Result>( d.vkBindBufferMemory(
+ static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template <typename Dispatch>
@@ -1205,8 +1212,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return static_cast<Result>( d.vkBindImageMemory(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template <typename Dispatch>
@@ -1232,7 +1239,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements(
+ static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1258,7 +1266,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1285,7 +1294,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements( m_device,
+ d.vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( image ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
@@ -1363,7 +1372,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
@@ -1473,8 +1482,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>( d.vkQueueBindSparse(
+ static_cast<VkQueue>( m_queue ), bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1502,7 +1511,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateFence( m_device,
+ return static_cast<Result>( d.vkCreateFence( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkFence *>( pFence ) ) );
@@ -1559,7 +1568,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1585,7 +1594,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1611,7 +1620,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+ return static_cast<Result>( d.vkResetFences( static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1637,7 +1646,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>( d.vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ) ) );
}
#else
template <typename Dispatch>
@@ -1664,8 +1673,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+ return static_cast<Result>( d.vkWaitForFences(
+ static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1697,7 +1706,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSemaphore( m_device,
+ return static_cast<Result>( d.vkCreateSemaphore( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
@@ -1758,7 +1767,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySemaphore(
+ static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1784,7 +1794,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySemaphore(
+ static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1811,7 +1822,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateEvent( m_device,
+ return static_cast<Result>( d.vkCreateEvent( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkEvent *>( pEvent ) ) );
@@ -1868,7 +1879,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1894,7 +1905,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -1919,7 +1930,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+ return static_cast<Result>( d.vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
@@ -1943,7 +1954,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return static_cast<Result>( d.vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
@@ -1967,7 +1978,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return static_cast<Result>( d.vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
@@ -1992,7 +2003,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateQueryPool( m_device,
+ return static_cast<Result>( d.vkCreateQueryPool( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
@@ -2053,7 +2064,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyQueryPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2079,7 +2091,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyQueryPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2110,7 +2123,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+ return static_cast<Result>( d.vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
static_cast<VkQueryPool>( queryPool ),
firstQuery,
queryCount,
@@ -2189,7 +2202,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateBuffer( m_device,
+ return static_cast<Result>( d.vkCreateBuffer( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkBuffer *>( pBuffer ) ) );
@@ -2246,7 +2259,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2272,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2299,7 +2312,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateBufferView( m_device,
+ return static_cast<Result>( d.vkCreateBufferView( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkBufferView *>( pView ) ) );
@@ -2360,7 +2373,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferView(
+ static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2386,7 +2400,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferView(
+ static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2413,7 +2428,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateImage( m_device,
+ return static_cast<Result>( d.vkCreateImage( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkImage *>( pImage ) ) );
@@ -2470,7 +2485,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2496,7 +2511,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2523,7 +2538,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSubresourceLayout( m_device,
+ d.vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( image ),
reinterpret_cast<const VkImageSubresource *>( pSubresource ),
reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
@@ -2556,7 +2571,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateImageView( m_device,
+ return static_cast<Result>( d.vkCreateImageView( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkImageView *>( pView ) ) );
@@ -2617,7 +2632,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImageView(
+ static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2643,7 +2659,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImageView(
+ static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2670,7 +2687,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateShaderModule( m_device,
+ return static_cast<Result>( d.vkCreateShaderModule( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
@@ -2731,7 +2748,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderModule(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2757,7 +2775,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderModule(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2784,7 +2803,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreatePipelineCache( m_device,
+ return static_cast<Result>( d.vkCreatePipelineCache( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
@@ -2845,7 +2864,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineCache(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2871,7 +2891,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineCache(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2898,7 +2919,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -2976,8 +2998,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+ return static_cast<Result>( d.vkMergePipelineCaches(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3009,7 +3031,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
+ return static_cast<Result>( d.vkCreateGraphicsPipelines( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfoCount,
reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
@@ -3212,7 +3234,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateComputePipelines( m_device,
+ return static_cast<Result>( d.vkCreateComputePipelines( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfoCount,
reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
@@ -3412,7 +3434,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipeline(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3438,7 +3461,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipeline(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3465,7 +3489,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
+ return static_cast<Result>( d.vkCreatePipelineLayout( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
@@ -3526,7 +3550,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineLayout(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3552,7 +3577,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineLayout(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3579,7 +3605,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSampler( m_device,
+ return static_cast<Result>( d.vkCreateSampler( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSampler *>( pSampler ) ) );
@@ -3636,7 +3662,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3662,7 +3688,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3689,7 +3715,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
+ return static_cast<Result>( d.vkCreateDescriptorSetLayout( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
@@ -3750,8 +3776,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout(
- m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3778,8 +3805,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout(
- m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3807,7 +3835,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
+ return static_cast<Result>( d.vkCreateDescriptorPool( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
@@ -3868,7 +3896,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3894,7 +3923,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -3921,8 +3951,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ return static_cast<Result>( d.vkResetDescriptorPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
@@ -3945,8 +3975,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAllocateDescriptorSets(
- m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkAllocateDescriptorSets( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4052,8 +4083,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkFreeDescriptorSets(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSetCount,
+ reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4079,8 +4112,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkFreeDescriptorSets(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSetCount,
+ reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4107,7 +4142,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateDescriptorSets( m_device,
+ d.vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ),
descriptorWriteCount,
reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
descriptorCopyCount,
@@ -4141,7 +4176,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateFramebuffer( m_device,
+ return static_cast<Result>( d.vkCreateFramebuffer( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
@@ -4202,7 +4237,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFramebuffer(
+ static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4228,7 +4264,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFramebuffer(
+ static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4255,7 +4292,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateRenderPass( m_device,
+ return static_cast<Result>( d.vkCreateRenderPass( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
@@ -4316,7 +4353,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyRenderPass(
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4342,7 +4380,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyRenderPass(
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4368,7 +4407,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+ d.vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4395,7 +4434,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateCommandPool( m_device,
+ return static_cast<Result>( d.vkCreateCommandPool( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
@@ -4456,7 +4495,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCommandPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4482,7 +4522,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCommandPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4509,7 +4550,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ return static_cast<Result>(
+ d.vkResetCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
@@ -4535,8 +4577,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAllocateCommandBuffers(
- m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+ return static_cast<Result>( d.vkAllocateCommandBuffers( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4642,8 +4685,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers(
- m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
+ static_cast<VkCommandPool>( commandPool ),
+ commandBufferCount,
+ reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4669,8 +4714,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers(
- m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
+ static_cast<VkCommandPool>( commandPool ),
+ commandBufferCount,
+ reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4694,7 +4741,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+ return static_cast<Result>(
+ d.vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4720,7 +4768,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+ return static_cast<Result>( d.vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) );
}
#else
template <typename Dispatch>
@@ -4744,7 +4792,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ return static_cast<Result>( d.vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
@@ -4769,7 +4817,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ d.vkCmdBindPipeline(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
template <typename Dispatch>
@@ -4779,7 +4828,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4804,7 +4853,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+ d.vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4826,7 +4875,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ d.vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
}
template <typename Dispatch>
@@ -4834,21 +4883,21 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ d.vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ d.vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ d.vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
}
template <typename Dispatch>
@@ -4856,7 +4905,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ d.vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
template <typename Dispatch>
@@ -4864,7 +4913,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ d.vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
template <typename Dispatch>
@@ -4872,7 +4921,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ d.vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
template <typename Dispatch>
@@ -4886,7 +4935,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindDescriptorSets( m_commandBuffer,
+ d.vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
firstSet,
@@ -4928,7 +4977,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+ d.vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkIndexType>( indexType ) );
}
template <typename Dispatch>
@@ -4939,8 +4991,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindVertexBuffers(
- m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+ d.vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstBinding,
+ bindingCount,
+ reinterpret_cast<const VkBuffer *>( pBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -4976,7 +5031,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ d.vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
}
template <typename Dispatch>
@@ -4988,7 +5043,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+ d.vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
template <typename Dispatch>
@@ -4999,7 +5054,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawIndirect(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
@@ -5010,7 +5066,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawIndexedIndirect(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
@@ -5018,7 +5075,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
template <typename Dispatch>
@@ -5027,7 +5084,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ d.vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
template <typename Dispatch>
@@ -5038,7 +5095,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBuffer( m_commandBuffer,
+ d.vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkBuffer>( dstBuffer ),
regionCount,
@@ -5075,7 +5132,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImage( m_commandBuffer,
+ d.vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
@@ -5119,7 +5176,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBlitImage( m_commandBuffer,
+ d.vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
@@ -5164,7 +5221,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage( m_commandBuffer,
+ d.vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
@@ -5203,7 +5260,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer( m_commandBuffer,
+ d.vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkBuffer>( dstBuffer ),
@@ -5241,8 +5298,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdUpdateBuffer(
- m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+ d.vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ static_cast<VkDeviceSize>( dataSize ),
+ pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5273,7 +5333,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+ d.vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ static_cast<VkDeviceSize>( size ),
+ data );
}
template <typename Dispatch>
@@ -5285,7 +5349,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdClearColorImage( m_commandBuffer,
+ d.vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
reinterpret_cast<const VkClearColorValue *>( pColor ),
@@ -5324,7 +5388,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdClearDepthStencilImage( m_commandBuffer,
+ d.vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
@@ -5363,7 +5427,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdClearAttachments( m_commandBuffer,
+ d.vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ),
attachmentCount,
reinterpret_cast<const VkClearAttachment *>( pAttachments ),
rectCount,
@@ -5399,7 +5463,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResolveImage( m_commandBuffer,
+ d.vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
@@ -5438,7 +5502,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ d.vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
template <typename Dispatch>
@@ -5447,7 +5511,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ d.vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
template <typename Dispatch>
@@ -5464,7 +5528,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWaitEvents( m_commandBuffer,
+ d.vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ),
eventCount,
reinterpret_cast<const VkEvent *>( pEvents ),
static_cast<VkPipelineStageFlags>( srcStageMask ),
@@ -5520,7 +5584,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPipelineBarrier( m_commandBuffer,
+ d.vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlags>( srcStageMask ),
static_cast<VkPipelineStageFlags>( dstStageMask ),
static_cast<VkDependencyFlags>( dependencyFlags ),
@@ -5568,14 +5632,15 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ d.vkCmdBeginQuery(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
@@ -5585,7 +5650,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ d.vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
template <typename Dispatch>
@@ -5595,7 +5660,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdWriteTimestamp(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
@@ -5609,7 +5675,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyQueryPoolResults( m_commandBuffer,
+ d.vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkQueryPool>( queryPool ),
firstQuery,
queryCount,
@@ -5628,7 +5694,12 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+ d.vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkPipelineLayout>( layout ),
+ static_cast<VkShaderStageFlags>( stageFlags ),
+ offset,
+ size,
+ pValues );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5659,7 +5730,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+ d.vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ static_cast<VkSubpassContents>( contents ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5681,14 +5754,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+ d.vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndRenderPass( m_commandBuffer );
+ d.vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
template <typename Dispatch>
@@ -5697,7 +5770,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5746,7 +5819,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>(
+ d.vkBindBufferMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5773,7 +5847,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>(
+ d.vkBindImageMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5803,7 +5878,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceGroupPeerMemoryFeatures(
- m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5829,7 +5904,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+ d.vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
}
template <typename Dispatch>
@@ -5842,7 +5917,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
template <typename Dispatch>
@@ -5852,8 +5927,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ),
+ pPhysicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5935,8 +6011,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements2(
- m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5982,8 +6059,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements2(
- m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6030,7 +6108,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements2( m_device,
+ d.vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
@@ -6105,7 +6183,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+ d.vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6148,7 +6226,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6192,7 +6270,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties2(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6237,7 +6316,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
@@ -6292,7 +6371,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6428,7 +6507,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6474,7 +6554,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
pPropertyCount,
reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
@@ -6550,7 +6630,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ d.vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
template <typename Dispatch>
@@ -6559,7 +6639,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+ d.vkGetDeviceQueue2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6587,7 +6667,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
@@ -6650,8 +6730,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversion(
- m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6679,8 +6760,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversion(
- m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6710,7 +6792,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
@@ -6774,8 +6856,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplate(
- m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6803,8 +6886,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplate(
- m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6833,8 +6917,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateDescriptorSetWithTemplate(
- m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ d.vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSet>( descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6863,7 +6949,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
@@ -6895,7 +6981,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
@@ -6928,7 +7014,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
@@ -6960,8 +7046,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutSupport(
- m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ d.vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7013,7 +7100,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirectCount( m_commandBuffer,
+ d.vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -7032,7 +7119,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
+ d.vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -7048,7 +7135,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateRenderPass2( m_device,
+ return static_cast<Result>( d.vkCreateRenderPass2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
@@ -7109,8 +7196,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2(
- m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7135,8 +7223,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2(
- m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7160,7 +7249,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7182,7 +7271,7 @@ namespace VULKAN_HPP_NAMESPACE
Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
template <typename Dispatch>
@@ -7191,7 +7280,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7219,7 +7308,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+ return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7246,7 +7335,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ return static_cast<Result>( d.vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7272,7 +7361,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7297,7 +7387,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+ return d.vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7322,7 +7412,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7350,8 +7441,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7434,7 +7525,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
+ return static_cast<Result>( d.vkCreatePrivateDataSlot( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
@@ -7495,7 +7586,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlot(
+ static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7522,7 +7614,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlot(
+ static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7552,8 +7645,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
+ return static_cast<Result>( d.vkSetPrivateData(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
}
#else
template <typename Dispatch>
@@ -7584,7 +7677,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
+ d.vkGetPrivateData(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7612,7 +7706,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdSetEvent2(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7636,7 +7731,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+ d.vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
template <typename Dispatch>
@@ -7646,8 +7741,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWaitEvents2(
- m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+ d.vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ eventCount,
+ reinterpret_cast<const VkEvent *>( pEvents ),
+ reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7681,7 +7778,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7705,7 +7802,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdWriteTimestamp2(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
@@ -7715,7 +7813,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>(
+ d.vkQueueSubmit2( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7741,7 +7840,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+ d.vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7762,7 +7861,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+ d.vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7783,7 +7882,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+ d.vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7805,7 +7904,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+ d.vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7826,7 +7925,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
+ d.vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7847,7 +7946,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
+ d.vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7869,7 +7968,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
+ d.vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7890,21 +7989,21 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndRendering( m_commandBuffer );
+ d.vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+ d.vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+ d.vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
template <typename Dispatch>
@@ -7912,7 +8011,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ d.vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
template <typename Dispatch>
@@ -7921,7 +8020,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7944,7 +8043,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+ d.vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -7972,7 +8071,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindVertexBuffers2( m_commandBuffer,
+ d.vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
bindingCount,
reinterpret_cast<const VkBuffer *>( pBuffers ),
@@ -8028,21 +8127,21 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+ d.vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+ d.vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+ d.vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
template <typename Dispatch>
@@ -8050,14 +8149,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+ d.vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+ d.vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
template <typename Dispatch>
@@ -8069,7 +8168,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilOp( m_commandBuffer,
+ d.vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
static_cast<VkStencilOp>( failOp ),
static_cast<VkStencilOp>( passOp ),
@@ -8082,14 +8181,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+ d.vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+ d.vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
template <typename Dispatch>
@@ -8097,7 +8196,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+ d.vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
template <typename Dispatch>
@@ -8106,8 +8205,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceBufferMemoryRequirements(
- m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8153,8 +8253,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageMemoryRequirements(
- m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8201,7 +8302,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageSparseMemoryRequirements( m_device,
+ d.vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
@@ -8280,7 +8381,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySurfaceKHR(
+ static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8306,7 +8408,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySurfaceKHR(
+ static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8334,7 +8437,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
- m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8362,8 +8465,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8392,8 +8496,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ pSurfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8473,8 +8579,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ pPresentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8556,7 +8664,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
+ return static_cast<Result>( d.vkCreateSwapchainKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
@@ -8617,7 +8725,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySwapchainKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8643,7 +8752,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySwapchainKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8670,8 +8780,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
+ return static_cast<Result>( d.vkGetSwapchainImagesKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8751,8 +8861,12 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireNextImageKHR(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+ return static_cast<Result>( d.vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( swapchain ),
+ timeout,
+ static_cast<VkSemaphore>( semaphore ),
+ static_cast<VkFence>( fence ),
+ pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8787,7 +8901,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+ return static_cast<Result>( d.vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8814,8 +8928,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+ return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8845,7 +8959,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
- m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8875,8 +8989,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8955,7 +9069,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
+ return static_cast<Result>(
+ d.vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -8990,8 +9105,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9067,8 +9182,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9148,8 +9263,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
+ return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9225,8 +9340,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayKHR>( display ),
+ pPropertyCount,
+ reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9310,7 +9427,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
+ return static_cast<Result>( d.vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkDisplayKHR>( display ),
reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
@@ -9378,8 +9495,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
- m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayModeKHR>( mode ),
+ planeIndex,
+ reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9408,7 +9527,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -9473,7 +9592,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
+ return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( m_device ),
swapchainCount,
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
@@ -9654,7 +9773,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateXlibSurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -9714,7 +9833,8 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9745,7 +9865,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateXcbSurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -9807,7 +9927,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9840,7 +9961,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -9901,7 +10022,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, display ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -9932,7 +10054,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -9998,7 +10120,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
+ return static_cast<Result>( d.vkCreateWin32SurfaceKHR( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -10057,7 +10179,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex ) );
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -10071,7 +10193,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
+ return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
@@ -10133,7 +10255,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
- m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10161,7 +10283,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
- m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10193,7 +10315,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDebugReportMessageEXT( m_instance,
+ d.vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugReportFlagsEXT>( flags ),
static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
object,
@@ -10237,7 +10359,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+ return static_cast<Result>(
+ d.vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10263,7 +10386,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+ return static_cast<Result>(
+ d.vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10289,7 +10413,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ d.vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10310,7 +10434,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ d.vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
template <typename Dispatch>
@@ -10318,7 +10442,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ d.vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10343,8 +10467,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
- m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10392,7 +10517,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
pVideoFormatPropertyCount,
reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
@@ -10486,7 +10611,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
+ return static_cast<Result>( d.vkCreateVideoSessionKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
@@ -10547,7 +10672,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10574,7 +10700,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10603,7 +10730,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
+ return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionKHR>( videoSession ),
pMemoryRequirementsCount,
reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
@@ -10698,7 +10825,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
+ return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionKHR>( videoSession ),
bindSessionMemoryInfoCount,
reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
@@ -10735,7 +10862,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
+ return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
@@ -10798,7 +10925,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
+ return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
}
@@ -10831,8 +10958,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionParametersKHR(
- m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10859,8 +10987,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionParametersKHR(
- m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10886,7 +11015,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
+ d.vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10908,7 +11037,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
+ d.vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10930,7 +11059,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
+ d.vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10954,7 +11084,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
+ d.vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -10982,7 +11112,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+ d.vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
bindingCount,
reinterpret_cast<const VkBuffer *>( pBuffers ),
@@ -11034,7 +11164,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+ d.vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstCounterBuffer,
counterBufferCount,
reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
@@ -11078,7 +11208,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+ d.vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstCounterBuffer,
counterBufferCount,
reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
@@ -11122,7 +11252,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+ d.vkCmdBeginQueryIndexedEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
template <typename Dispatch>
@@ -11130,7 +11261,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+ d.vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
}
template <typename Dispatch>
@@ -11143,7 +11274,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
+ d.vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
instanceCount,
firstInstance,
static_cast<VkBuffer>( counterBuffer ),
@@ -11161,7 +11292,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
+ return static_cast<Result>( d.vkCreateCuModuleNVX( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
@@ -11223,7 +11354,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
+ return static_cast<Result>( d.vkCreateCuFunctionNVX( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
@@ -11284,7 +11415,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuModuleNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11310,7 +11442,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuModuleNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11336,7 +11469,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11362,7 +11496,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11387,7 +11522,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
+ d.vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11411,7 +11546,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
+ return d.vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11436,8 +11571,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetImageViewAddressNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11471,7 +11606,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
+ d.vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -11490,7 +11625,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
+ d.vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -11510,7 +11645,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ return static_cast<Result>( d.vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( pipeline ),
static_cast<VkShaderStageFlagBits>( shaderStage ),
static_cast<VkShaderInfoTypeAMD>( infoType ),
@@ -11616,7 +11751,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
+ d.vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11637,7 +11772,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndRenderingKHR( m_commandBuffer );
+ d.vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
#if defined( VK_USE_PLATFORM_GGP )
@@ -11651,7 +11786,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
+ return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -11722,7 +11857,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkImageTiling>( tiling ),
@@ -11775,8 +11910,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+ return static_cast<Result>( d.vkGetMemoryWin32HandleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11806,7 +11941,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+ d.vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11849,7 +11984,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11894,7 +12029,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceFormatProperties2KHR(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -11941,7 +12076,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
@@ -11996,7 +12131,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12132,7 +12267,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12178,7 +12314,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
pPropertyCount,
reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
@@ -12260,7 +12396,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
- m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12286,7 +12422,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+ d.vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
}
template <typename Dispatch>
@@ -12299,7 +12435,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
#if defined( VK_USE_PLATFORM_VI_NN )
@@ -12312,7 +12448,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
+ return static_cast<Result>( d.vkCreateViSurfaceNN( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -12376,7 +12512,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ d.vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
//=== VK_KHR_device_group_creation ===
@@ -12388,8 +12524,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ),
+ pPhysicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12473,7 +12610,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
@@ -12508,8 +12645,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12539,7 +12676,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
+ return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
handle,
reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
@@ -12576,7 +12713,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12605,8 +12742,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
- m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ fd,
+ reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12637,7 +12776,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
@@ -12671,8 +12810,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+ return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12699,8 +12838,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12730,7 +12869,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+ return static_cast<Result>(
+ d.vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12757,7 +12897,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>(
+ d.vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12790,7 +12931,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
+ d.vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
set,
@@ -12829,8 +12970,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushDescriptorSetWithTemplateKHR(
- m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+ d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12862,7 +13006,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+ d.vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12883,7 +13028,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+ d.vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
//=== VK_KHR_descriptor_update_template ===
@@ -12896,7 +13041,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
@@ -12960,8 +13105,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplateKHR(
- m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -12990,8 +13136,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateDescriptorSetWithTemplateKHR(
- m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+ d.vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSet>( descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13023,7 +13171,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+ d.vkCmdSetViewportWScalingNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13050,7 +13199,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) );
}
#else
template <typename Dispatch>
@@ -13074,7 +13223,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, static_cast<VkDisplayKHR>( display ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13102,7 +13251,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ return static_cast<Result>(
+ d.vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13154,8 +13304,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13186,8 +13337,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
+ return static_cast<Result>( d.vkDisplayPowerControlEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13216,7 +13367,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
+ return static_cast<Result>( d.vkRegisterDeviceEventEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkFence *>( pFence ) ) );
@@ -13279,7 +13430,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
+ return static_cast<Result>( d.vkRegisterDisplayEventEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDisplayKHR>( display ),
reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
@@ -13346,8 +13497,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+ return static_cast<Result>( d.vkGetSwapchainCounterEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13378,8 +13529,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+ return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13409,7 +13561,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
+ return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( swapchain ),
pPresentationTimingCount,
reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
@@ -13505,7 +13657,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+ d.vkCmdSetDiscardRectangleEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13529,7 +13682,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
+ d.vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) );
}
template <typename Dispatch>
@@ -13537,7 +13690,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
+ d.vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
}
//=== VK_EXT_hdr_metadata ===
@@ -13549,8 +13702,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetHdrMetadataEXT(
- m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+ d.vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
+ swapchainCount,
+ reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
+ reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13588,7 +13743,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
+ return static_cast<Result>( d.vkCreateRenderPass2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
@@ -13649,8 +13804,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2KHR(
- m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13675,8 +13831,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2KHR(
- m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13700,7 +13857,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13725,7 +13882,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template <typename Dispatch>
@@ -13755,7 +13912,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
@@ -13789,8 +13946,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+ return static_cast<Result>( d.vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13818,7 +13975,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ d.vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13848,7 +14005,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+ return static_cast<Result>(
+ d.vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13875,7 +14033,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -13909,7 +14067,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
+ d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
queueFamilyIndex,
pCounterCount,
reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
@@ -14025,8 +14183,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
+ d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
+ pNumPasses );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14053,7 +14212,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>(
+ d.vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14078,7 +14238,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkReleaseProfilingLockKHR( m_device );
+ d.vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
}
//=== VK_KHR_get_surface_capabilities2 ===
@@ -14090,7 +14250,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
}
@@ -14145,7 +14305,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
pSurfaceFormatCount,
reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
@@ -14334,8 +14494,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14415,8 +14575,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14498,8 +14658,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayKHR>( display ),
+ pPropertyCount,
+ reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14583,7 +14745,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+ return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
}
@@ -14619,7 +14781,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
+ return static_cast<Result>( d.vkCreateIOSSurfaceMVK( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -14685,7 +14847,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
+ return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -14748,7 +14910,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+ return static_cast<Result>(
+ d.vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14774,7 +14937,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+ return static_cast<Result>(
+ d.vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14800,7 +14964,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ d.vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14821,7 +14985,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+ d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
}
template <typename Dispatch>
@@ -14829,7 +14993,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14851,7 +15015,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14872,7 +15036,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+ d.vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
template <typename Dispatch>
@@ -14880,7 +15044,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ d.vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14905,7 +15069,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
+ return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
@@ -14966,8 +15130,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugUtilsMessengerEXT(
- m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -14994,8 +15159,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugUtilsMessengerEXT(
- m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15023,7 +15189,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSubmitDebugUtilsMessageEXT( m_instance,
+ d.vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
@@ -15058,8 +15224,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+ static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15109,8 +15275,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
+ return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15147,7 +15313,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
+ return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfoCount,
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
@@ -15350,8 +15516,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
- m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
+ return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( executionGraph ),
+ reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15382,8 +15549,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
- m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
+ return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( executionGraph ),
+ reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ),
+ pNodeIndex ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15406,25 +15575,35 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
+ VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
+ d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkPipeline>( executionGraph ),
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+ d.vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
@@ -15433,23 +15612,30 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
# endif
- d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ d.vkCmdDispatchGraphAMDX( m_commandBuffer,
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchGraphIndirectAMDX(
- m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
+ d.vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
@@ -15458,18 +15644,24 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
# endif
- d.vkCmdDispatchGraphIndirectAMDX(
- m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+ d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer,
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ),
+ reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+ d.vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( scratch ),
+ static_cast<VkDeviceSize>( scratchSize ),
+ static_cast<VkDeviceAddress>( countInfo ) );
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
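[Note on the VK_AMDX_shader_enqueue hunks above: initializeGraphScratchMemoryAMDX now also takes the execution-graph pipeline and an explicit scratch size, and the dispatchGraph*AMDX commands gain a scratchSize parameter. The following is a minimal sketch of the updated calling sequence, not part of the patch; it assumes VK_ENABLE_BETA_EXTENSIONS, a device with VK_AMDX_shader_enqueue enabled, and an already-created execution-graph pipeline. The handle and buffer variables are illustrative placeholders, not names from this file.

    #ifdef VK_ENABLE_BETA_EXTENSIONS
    #  include <vulkan/vulkan.hpp>

    // Illustrative only: records one execution-graph dispatch using the
    // post-change signatures, where the scratch size is passed explicitly.
    void recordGraphDispatch( vk::CommandBuffer                      cmd,
                              vk::Pipeline                           executionGraph,
                              vk::DeviceAddress                      scratch,      // device address of a scratch buffer
                              vk::DeviceSize                         scratchSize,  // its size, assumed to satisfy the
                                                                                   // pipeline's reported scratch requirements
                              vk::DispatchGraphCountInfoAMDX const & countInfo )
    {
      cmd.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, executionGraph );

      // New signature: the pipeline and scratch size accompany the scratch address.
      cmd.initializeGraphScratchMemoryAMDX( executionGraph, scratch, scratchSize );

      // New signature: scratchSize is passed alongside the scratch address.
      cmd.dispatchGraphAMDX( scratch, scratchSize, countInfo );
    }
    #endif

End of note; the diff continues below.]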
@@ -15480,7 +15672,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+ d.vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15503,8 +15695,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
- m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+ d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15534,8 +15727,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements2KHR(
- m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15581,8 +15775,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements2KHR(
- m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15629,7 +15824,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements2KHR( m_device,
+ d.vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
@@ -15710,7 +15905,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
+ return static_cast<Result>( d.vkCreateAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
@@ -15772,8 +15967,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureKHR(
- m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15800,8 +15996,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureKHR(
- m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -15830,7 +16027,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
+ d.vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
infoCount,
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
@@ -15872,7 +16069,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
+ d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
infoCount,
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
@@ -15932,7 +16129,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkBuildAccelerationStructuresKHR( m_device,
+ d.vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
infoCount,
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
@@ -15981,8 +16178,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16015,8 +16213,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16050,8 +16249,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16089,7 +16289,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
+ return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
accelerationStructureCount,
reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
static_cast<VkQueryType>( queryType ),
@@ -16162,7 +16362,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+ d.vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16184,7 +16385,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+ d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16207,7 +16409,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+ d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16230,8 +16433,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>(
- d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+ return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16263,7 +16466,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
accelerationStructureCount,
reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
static_cast<VkQueryType>( queryType ),
@@ -16301,7 +16504,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
+ d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
}
@@ -16335,7 +16538,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetAccelerationStructureBuildSizesKHR( m_device,
+ d.vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
pMaxPrimitiveCounts,
@@ -16388,7 +16591,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysKHR( m_commandBuffer,
+ d.vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
@@ -16436,7 +16639,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
+ return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfoCount,
@@ -16672,8 +16875,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16724,8 +16927,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16777,7 +16980,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+ d.vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
@@ -16815,15 +17018,15 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceSize>(
- d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+ return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
+ d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
}
//=== VK_KHR_sampler_ycbcr_conversion ===
@@ -16836,7 +17039,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
@@ -16899,8 +17102,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversionKHR(
- m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16930,7 +17134,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>(
+ d.vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16958,7 +17163,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>(
+ d.vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -16987,7 +17193,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17019,7 +17225,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
+ return static_cast<Result>( d.vkCreateValidationCacheEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
@@ -17081,7 +17287,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyValidationCacheEXT(
- m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17109,7 +17315,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyValidationCacheEXT(
- m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17137,8 +17343,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkMergeValidationCachesEXT(
- m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
+ return static_cast<Result>( d.vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkValidationCacheEXT>( dstCache ),
+ srcCacheCount,
+ reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17168,7 +17376,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17247,7 +17456,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+ d.vkCmdBindShadingRateImageNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
template <typename Dispatch>
@@ -17258,7 +17468,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetViewportShadingRatePaletteNV(
- m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17285,7 +17495,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
+ d.vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
customSampleOrderCount,
reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
@@ -17320,7 +17530,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
+ return static_cast<Result>( d.vkCreateAccelerationStructureNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
@@ -17382,8 +17592,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureNV(
- m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17410,8 +17621,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureNV(
- m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17439,7 +17651,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
+ d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
}
@@ -17490,8 +17702,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
+ static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17524,7 +17736,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
+ d.vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
static_cast<VkBuffer>( instanceData ),
static_cast<VkDeviceSize>( instanceOffset ),
@@ -17571,7 +17783,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
+ d.vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkAccelerationStructureNV>( dst ),
static_cast<VkAccelerationStructureNV>( src ),
static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
@@ -17595,7 +17807,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysNV( m_commandBuffer,
+ d.vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
static_cast<VkBuffer>( missShaderBindingTableBuffer ),
@@ -17621,7 +17833,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
+ return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfoCount,
reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
@@ -17824,8 +18036,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17874,8 +18086,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+ return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -17924,7 +18136,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
+ d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
accelerationStructureCount,
reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
static_cast<VkQueryType>( queryType ),
@@ -17963,7 +18175,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+ return static_cast<Result>( d.vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), shader ) );
}
#else
template <typename Dispatch>
@@ -17991,8 +18203,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutSupportKHR(
- m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ d.vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18046,7 +18259,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
+ d.vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -18065,7 +18278,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
+ d.vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -18084,7 +18297,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
+ return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
pHostPointer,
reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
@@ -18124,7 +18337,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
+ d.vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlagBits>( pipelineStage ),
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
@@ -18139,8 +18352,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18219,8 +18432,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
- m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
+ return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ),
+ timestampCount,
+ reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ),
+ pTimestamps,
+ pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18298,7 +18514,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+ d.vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
}
template <typename Dispatch>
@@ -18309,7 +18525,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawMeshTasksIndirectNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
@@ -18322,7 +18539,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
+ d.vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -18340,8 +18557,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetExclusiveScissorEnableNV(
- m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
+ d.vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstExclusiveScissor,
+ exclusiveScissorCount,
+ reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18368,7 +18587,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+ d.vkCmdSetExclusiveScissorNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18393,7 +18613,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+ d.vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), pCheckpointMarker );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18415,7 +18635,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+ d.vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18476,7 +18696,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18504,7 +18724,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+ return static_cast<Result>(
+ d.vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18531,7 +18752,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ return static_cast<Result>( d.vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18559,8 +18780,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+ return static_cast<Result>( d.vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18585,7 +18806,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUninitializePerformanceApiINTEL( m_device );
+ d.vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
}
template <typename Dispatch>
@@ -18593,7 +18814,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18619,8 +18841,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18646,8 +18868,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+ return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18675,7 +18897,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
+ return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
}
@@ -18730,7 +18952,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>(
+ d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
@@ -18756,7 +18979,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>(
+ d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
@@ -18782,7 +19006,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>(
+ d.vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
@@ -18810,7 +19035,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
- m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -18840,7 +19065,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+ d.vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
}
#if defined( VK_USE_PLATFORM_FUCHSIA )
@@ -18854,7 +19079,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
+ return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -18920,7 +19145,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
+ return static_cast<Result>( d.vkCreateMetalSurfaceEXT( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -18985,8 +19210,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ pFragmentShadingRateCount,
+ reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19071,8 +19298,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFragmentShadingRateKHR(
- m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19098,7 +19326,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( pLocationInfo ) );
+ d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( pLocationInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19122,7 +19351,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer,
+ d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderingInputAttachmentIndexInfoKHR *>( pInputAttachmentIndexInfo ) );
}
@@ -19150,7 +19379,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19178,8 +19408,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19265,7 +19495,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
+ return static_cast<Result>( d.vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
}
#else
template <typename Dispatch>
@@ -19296,7 +19526,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19381,7 +19611,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19471,7 +19701,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
pPresentModeCount,
reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
@@ -19561,7 +19791,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) );
}
# else
template <typename Dispatch>
@@ -19587,7 +19817,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) );
}
# else
template <typename Dispatch>
@@ -19614,8 +19844,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
- m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19648,7 +19879,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
+ return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -19710,7 +19941,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19735,7 +19967,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+ return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19760,7 +19992,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19787,7 +20020,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+ d.vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
}
//=== VK_EXT_host_query_reset ===
@@ -19799,7 +20032,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ d.vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
//=== VK_EXT_extended_dynamic_state ===
@@ -19808,14 +20041,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+ d.vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+ d.vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
template <typename Dispatch>
@@ -19823,7 +20056,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ d.vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
template <typename Dispatch>
@@ -19832,7 +20065,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19855,7 +20088,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+ d.vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -19883,7 +20116,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
+ d.vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
bindingCount,
reinterpret_cast<const VkBuffer *>( pBuffers ),
@@ -19939,21 +20172,21 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+ d.vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+ d.vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+ d.vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
template <typename Dispatch>
@@ -19961,14 +20194,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+ d.vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+ d.vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
template <typename Dispatch>
@@ -19980,7 +20213,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetStencilOpEXT( m_commandBuffer,
+ d.vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
static_cast<VkStencilOp>( failOp ),
static_cast<VkStencilOp>( passOp ),
@@ -19996,8 +20229,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDeferredOperationKHR(
- m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
+ return static_cast<Result>( d.vkCreateDeferredOperationKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20050,7 +20284,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDeferredOperationKHR(
- m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20078,7 +20312,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDeferredOperationKHR(
- m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20104,7 +20338,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+ return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) );
}
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20113,7 +20347,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return static_cast<Result>( d.vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
template <typename Dispatch>
@@ -20138,7 +20372,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return static_cast<Result>( d.vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
template <typename Dispatch>
@@ -20170,7 +20404,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
+ return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
pExecutableCount,
reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
@@ -20267,7 +20501,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
+ return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
pStatisticCount,
reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
@@ -20365,7 +20599,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
+ d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
pInternalRepresentationCount,
reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
@@ -20463,7 +20697,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
+ return static_cast<Result>(
+ d.vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20489,7 +20724,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
+ return static_cast<Result>(
+ d.vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20515,7 +20751,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
+ return static_cast<Result>(
+ d.vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20542,8 +20779,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
+ return static_cast<Result>( d.vkTransitionImageLayoutEXT(
+ static_cast<VkDevice>( m_device ), transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20572,7 +20809,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSubresourceLayout2EXT( m_device,
+ d.vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( image ),
reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
@@ -20629,7 +20866,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) );
+ return static_cast<Result>(
+ d.vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20656,7 +20894,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
+ return static_cast<Result>( d.vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20684,7 +20922,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
+ return static_cast<Result>(
+ d.vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20713,7 +20952,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
@@ -20764,7 +21003,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ d.vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20787,8 +21027,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteGeneratedCommandsNV(
- m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ d.vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( isPreprocessed ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20814,7 +21055,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+ d.vkCmdBindPipelineShaderGroupNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
}
template <typename Dispatch>
@@ -20825,7 +21067,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
+ return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
@@ -20887,8 +21129,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutNV(
- m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20915,8 +21158,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutNV(
- m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20944,7 +21188,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) );
+ d.vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -20970,7 +21214,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) );
}
#else
template <typename Dispatch>
@@ -20997,7 +21241,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
+ return static_cast<Result>(
+ d.vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21048,7 +21293,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
+ return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
@@ -21109,7 +21354,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlotEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21139,8 +21385,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
+ return static_cast<Result>( d.vkSetPrivateDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
}
#else
template <typename Dispatch>
@@ -21171,7 +21417,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
+ d.vkGetPrivateDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21203,7 +21450,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
+ d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ),
reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) );
}
@@ -21264,7 +21511,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetEncodedVideoSessionParametersKHR( m_device,
+ d.vkGetEncodedVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ),
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ),
pDataSize,
@@ -21451,7 +21698,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
+ d.vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21478,7 +21725,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
+ return static_cast<Result>( d.vkCreateCudaModuleNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
@@ -21540,7 +21787,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
+ return static_cast<Result>( d.vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21616,7 +21863,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
+ return static_cast<Result>( d.vkCreateCudaFunctionNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
@@ -21677,7 +21924,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCudaModuleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21703,7 +21951,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCudaModuleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21729,7 +21978,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCudaFunctionNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21755,7 +22005,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCudaFunctionNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21780,7 +22031,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
+ d.vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21806,7 +22057,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
+ d.vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21851,7 +22102,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdSetEvent2KHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21875,7 +22127,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+ d.vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
template <typename Dispatch>
@@ -21885,8 +22137,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWaitEvents2KHR(
- m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+ d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ eventCount,
+ reinterpret_cast<const VkEvent *>( pEvents ),
+ reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21920,7 +22174,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21944,7 +22198,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdWriteTimestamp2KHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
@@ -21955,7 +22210,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ d.vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -21984,8 +22239,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteBufferMarker2AMD(
- m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+ d.vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkPipelineStageFlags2>( stage ),
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ marker );
}
template <typename Dispatch>
@@ -21994,7 +22252,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
+ d.vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22055,7 +22313,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
+ d.vkGetDescriptorSetLayoutSizeEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22082,7 +22341,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
+ d.vkGetDescriptorSetLayoutBindingOffsetEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22110,7 +22370,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
+ d.vkGetDescriptorEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22151,7 +22411,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
+ d.vkCmdBindDescriptorBuffersEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22179,7 +22440,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
+ d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
firstSet,
@@ -22228,7 +22489,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
- m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
}
template <typename Dispatch>
@@ -22236,8 +22497,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+ return static_cast<Result>( d.vkGetBufferOpaqueCaptureDescriptorDataEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22265,8 +22526,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+ return static_cast<Result>( d.vkGetImageOpaqueCaptureDescriptorDataEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22294,8 +22555,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+ return static_cast<Result>( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22323,8 +22584,8 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+ return static_cast<Result>( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22353,7 +22614,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
- m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22385,8 +22646,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFragmentShadingRateEnumNV(
- m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkFragmentShadingRateNV>( shadingRate ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
//=== VK_EXT_mesh_shader ===
@@ -22396,7 +22658,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
template <typename Dispatch>
@@ -22407,7 +22669,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawMeshTasksIndirectEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
@@ -22420,7 +22683,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
+ d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
@@ -22436,7 +22699,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+ d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22458,7 +22721,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+ d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22480,7 +22743,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+ d.vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22502,7 +22766,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+ d.vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22524,7 +22789,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
+ d.vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22546,7 +22811,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
+ d.vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22572,7 +22837,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
- m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
}
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -22583,7 +22848,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) );
}
# else
template <typename Dispatch>
@@ -22609,7 +22874,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ return static_cast<Result>(
+ d.vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22662,7 +22928,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
+ return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -22723,7 +22989,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dfb ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22754,7 +23021,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetVertexInputEXT( m_commandBuffer,
+ d.vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexBindingDescriptionCount,
reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
vertexAttributeDescriptionCount,
@@ -22791,8 +23058,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+ return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22823,7 +23090,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
+ d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
zirconHandle,
reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
@@ -22864,7 +23131,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
- m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22893,8 +23160,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+ return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -22928,7 +23195,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
+ return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
@@ -22990,8 +23257,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
- m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
+ return static_cast<Result>(
+ d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( collection ),
+ reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23022,8 +23291,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
- m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
+ return static_cast<Result>(
+ d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( collection ),
+ reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23054,7 +23325,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyBufferCollectionFUCHSIA(
- m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23082,7 +23353,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyBufferCollectionFUCHSIA(
- m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23110,8 +23381,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
- m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( collection ),
+ reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23144,7 +23416,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23171,7 +23443,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
+ d.vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
//=== VK_HUAWEI_invocation_mask ===
@@ -23182,7 +23454,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+ d.vkCmdBindInvocationMaskHUAWEI(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
//=== VK_NV_external_memory_rdma ===
@@ -23194,8 +23467,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
- m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
+ return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ),
+ reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23225,8 +23499,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
- m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
+ return static_cast<Result>( d.vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ),
+ reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23254,7 +23529,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
+ d.vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
template <typename Dispatch>
@@ -23262,21 +23537,21 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+ d.vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+ d.vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
+ d.vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
template <typename Dispatch>
@@ -23284,7 +23559,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+ d.vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
@@ -23297,7 +23572,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
+ return static_cast<Result>( d.vkCreateScreenSurfaceQNX( static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
@@ -23358,7 +23633,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, window ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23387,7 +23663,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
+ d.vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23411,7 +23687,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ d.vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
//=== VK_EXT_multi_draw ===
@@ -23425,7 +23701,12 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
+ d.vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ drawCount,
+ reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ),
+ instanceCount,
+ firstInstance,
+ stride );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23459,8 +23740,13 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMultiIndexedEXT(
- m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
+ d.vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ drawCount,
+ reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ),
+ instanceCount,
+ firstInstance,
+ stride,
+ pVertexOffset );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23496,7 +23782,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+ return static_cast<Result>( d.vkCreateMicromapEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
@@ -23557,7 +23843,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23583,7 +23870,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23609,7 +23897,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
+ d.vkCmdBuildMicromapsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23633,8 +23921,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBuildMicromapsEXT(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
+ return static_cast<Result>( d.vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infoCount,
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23666,8 +23956,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23698,8 +23988,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23729,8 +24020,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
- m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23764,8 +24056,13 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
- m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+ return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
+ micromapCount,
+ reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
+ static_cast<VkQueryType>( queryType ),
+ dataSize,
+ pData,
+ stride ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23828,7 +24125,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
+ d.vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23849,7 +24146,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
+ d.vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23871,7 +24168,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
+ d.vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -23897,7 +24194,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+ d.vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
micromapCount,
reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
static_cast<VkQueryType>( queryType ),
@@ -23934,7 +24231,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+ d.vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
}
@@ -23965,7 +24262,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetMicromapBuildSizesEXT( m_device,
+ d.vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
@@ -24000,7 +24297,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
template <typename Dispatch>
@@ -24009,7 +24306,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ d.vkCmdDrawClusterIndirectHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
//=== VK_EXT_pageable_device_local_memory ===
@@ -24018,7 +24315,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
+ d.vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), priority );
}
//=== VK_KHR_maintenance4 ===
@@ -24029,8 +24326,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceBufferMemoryRequirementsKHR(
- m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24076,8 +24374,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageMemoryRequirementsKHR(
- m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24124,7 +24423,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+ d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
@@ -24203,7 +24502,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
+ d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
}
@@ -24234,7 +24533,7 @@ namespace VULKAN_HPP_NAMESPACE
Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
+ d.vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( descriptorSet ), ppData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24264,7 +24563,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
+ d.vkCmdCopyMemoryIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
}
template <typename Dispatch>
@@ -24277,7 +24576,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
+ d.vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( copyBufferAddress ),
copyCount,
stride,
@@ -24319,7 +24618,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
+ d.vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ decompressRegionCount,
+ reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24345,8 +24646,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDecompressMemoryIndirectCountNV(
- m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
+ d.vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDeviceAddress>( indirectCommandsAddress ),
+ static_cast<VkDeviceAddress>( indirectCommandsCountAddress ),
+ stride );
}
//=== VK_NV_device_generated_commands_compute ===
@@ -24357,8 +24660,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPipelineIndirectMemoryRequirementsNV(
- m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24406,7 +24710,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ d.vkCmdUpdatePipelineIndirectBufferNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
template <typename Dispatch>
@@ -24415,7 +24720,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<DeviceAddress>(
- d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
+ d.vkGetPipelineIndirectDeviceAddressNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24441,14 +24746,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
+ d.vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
+ d.vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
}
template <typename Dispatch>
@@ -24456,7 +24761,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+ d.vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
}
template <typename Dispatch>
@@ -24465,7 +24770,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
+ d.vkCmdSetSampleMaskEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24496,21 +24802,21 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
+ d.vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
+ d.vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
+ d.vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
}
template <typename Dispatch>
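The hunks above all follow the same shape for the VK_EXT_extended_dynamic_state3 setters: one dynamic-state value in, the stored VkCommandBuffer handle passed through the dispatcher. A recording-time sketch, assuming `cmd` is a vk::CommandBuffer in the recording state, the extension is enabled, and the corresponding pipeline states were declared dynamic (placeholder names, not from the diff):

    // Illustrative sketch only -- dynamic rasterization / multisample state.
    cmd.setDepthClampEnableEXT( VK_FALSE );
    cmd.setPolygonModeEXT( vk::PolygonMode::eFill );
    cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e1 );
    vk::SampleMask sampleMask = 0xFFFFFFFFu;
    cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e1, &sampleMask );
    cmd.setAlphaToCoverageEnableEXT( VK_FALSE );
    cmd.setLogicOpEnableEXT( VK_FALSE );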
@@ -24520,7 +24826,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
+ d.vkCmdSetColorBlendEnableEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24546,8 +24853,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorBlendEquationEXT(
- m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
+ d.vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ attachmentCount,
+ reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24575,7 +24884,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
+ d.vkCmdSetColorWriteMaskEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
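The color-blend setters above take per-attachment arrays addressed by firstAttachment / attachmentCount. A sketch for two color attachments, again with placeholder names and the pointer-style overloads shown in the hunks:

    // Illustrative sketch only -- per-attachment blend enable and write mask.
    vk::Bool32              blendEnables[2] = { VK_TRUE, VK_FALSE };
    vk::ColorComponentFlags writeMasks[2]   = {
      vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA,
      vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA };
    cmd.setColorBlendEnableEXT( 0, 2, blendEnables );
    cmd.setColorWriteMaskEXT( 0, 2, writeMasks );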
@@ -24601,14 +24911,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+ d.vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
+ d.vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
}
template <typename Dispatch>
@@ -24617,7 +24927,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+ d.vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
}
template <typename Dispatch>
@@ -24625,14 +24936,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
+ d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
+ d.vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
}
template <typename Dispatch>
@@ -24640,7 +24951,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
+ d.vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
}
template <typename Dispatch>
@@ -24650,8 +24961,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorBlendAdvancedEXT(
- m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
+ d.vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ attachmentCount,
+ reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24677,7 +24990,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+ d.vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
}
template <typename Dispatch>
@@ -24685,14 +24998,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+ d.vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
+ d.vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
}
template <typename Dispatch>
@@ -24700,7 +25013,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
+ d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
}
template <typename Dispatch>
@@ -24708,7 +25021,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
+ d.vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
}
template <typename Dispatch>
@@ -24718,7 +25031,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
+ d.vkCmdSetViewportSwizzleNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24744,14 +25058,14 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
+ d.vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
+ d.vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
}
template <typename Dispatch>
@@ -24759,7 +25073,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+ d.vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
}
template <typename Dispatch>
@@ -24767,7 +25081,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
+ d.vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) );
}
template <typename Dispatch>
@@ -24776,7 +25090,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+ d.vkCmdSetCoverageModulationTableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24799,7 +25113,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
+ d.vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
}
template <typename Dispatch>
@@ -24807,7 +25121,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
+ d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( representativeFragmentTestEnable ) );
}
template <typename Dispatch>
@@ -24815,7 +25129,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+ d.vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
}
//=== VK_EXT_shader_module_identifier ===
@@ -24826,7 +25140,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+ d.vkGetShaderModuleIdentifierEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -24852,8 +25167,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetShaderModuleCreateInfoIdentifierEXT(
- m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+ d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
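A usage sketch for the VK_EXT_shader_module_identifier wrappers above, assuming `device` and `shaderModule` are valid handles (placeholders, not from the diff):

    // Illustrative sketch only -- retrieve an identifier that can later be matched
    // against cached pipelines instead of re-supplying the SPIR-V module.
    vk::ShaderModuleIdentifierEXT identifier;
    device.getShaderModuleIdentifierEXT( shaderModule, &identifier );
    // identifier.identifierSize bytes of identifier.identifier are now valid.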
@@ -24887,7 +25203,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
pFormatCount,
reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
@@ -24984,7 +25300,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
+ return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
@@ -25045,7 +25361,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyOpticalFlowSessionNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25072,7 +25389,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyOpticalFlowSessionNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25102,7 +25420,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
+ return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
static_cast<VkOpticalFlowSessionNV>( session ),
static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
static_cast<VkImageView>( view ),
@@ -25140,8 +25458,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdOpticalFlowExecuteNV(
- m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
+ d.vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25170,7 +25489,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
+ d.vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkDeviceSize>( size ),
@@ -25183,8 +25502,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetRenderingAreaGranularityKHR(
- m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+ d.vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ),
+ reinterpret_cast<VkExtent2D *>( pGranularity ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
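A sketch for the VK_KHR_maintenance5 render-area granularity query above, with placeholder `device` and a single color attachment format chosen for illustration:

    // Illustrative sketch only -- query the optimal render-area granularity for a
    // dynamic-rendering pass with one color attachment.
    vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
    vk::RenderingAreaInfoKHR renderingAreaInfo{};
    renderingAreaInfo.colorAttachmentCount    = 1;
    renderingAreaInfo.pColorAttachmentFormats = &colorFormat;
    vk::Extent2D granularity;
    device.getRenderingAreaGranularityKHR( &renderingAreaInfo, &granularity );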
@@ -25211,8 +25531,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageSubresourceLayoutKHR(
- m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
+ d.vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ),
+ reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25257,7 +25578,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSubresourceLayout2KHR( m_device,
+ d.vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( image ),
reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
@@ -25312,7 +25633,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( pData ) );
+ d.vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
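A sketch for Device::antiLagUpdateAMD above, assuming the VK_AMD_anti_lag structure layout (a mode plus optional presentation info) and placeholder handles; verify the enum and fields against the extension spec before relying on this:

    // Illustrative sketch only -- opt into AMD Anti-Lag for subsequent frames.
    vk::AntiLagDataAMD antiLagData{};
    antiLagData.mode = vk::AntiLagModeAMD::eOn;  // assumption: maps to VK_ANTI_LAG_MODE_ON_AMD
    device.antiLagUpdateAMD( &antiLagData );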
@@ -25338,7 +25659,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateShadersEXT( m_device,
+ return static_cast<Result>( d.vkCreateShadersEXT( static_cast<VkDevice>( m_device ),
createInfoCount,
reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
@@ -25525,7 +25846,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25551,7 +25873,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25576,7 +25899,7 @@ namespace VULKAN_HPP_NAMESPACE
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
+ return static_cast<Result>( d.vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
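Device::getShaderBinaryDataEXT above follows the usual size-query-then-fill contract. A sketch with placeholder `device` / `shader`, assuming <vector> is available; error handling is omitted:

    // Illustrative sketch only -- retrieve a shader object's binary blob.
    size_t dataSize = 0;
    vk::Result result = device.getShaderBinaryDataEXT( shader, &dataSize, nullptr );
    std::vector<uint8_t> binary( dataSize );
    result = device.getShaderBinaryDataEXT( shader, &dataSize, binary.data() );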
@@ -25652,8 +25975,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindShadersEXT(
- m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
+ d.vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ stageCount,
+ reinterpret_cast<const VkShaderStageFlagBits *>( pStages ),
+ reinterpret_cast<const VkShaderEXT *>( pShaders ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
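CommandBuffer::bindShadersEXT above takes parallel stage / shader arrays. A sketch binding a vertex/fragment pair of previously created shader objects (placeholder handles, not from the diff):

    // Illustrative sketch only -- bind two vk::ShaderEXT objects for drawing.
    vk::ShaderStageFlagBits stages[2]  = { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment };
    vk::ShaderEXT           shaders[2] = { vertexShader, fragmentShader };
    cmd.bindShadersEXT( 2, stages, shaders );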
@@ -25688,8 +26013,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDepthClampRangeEXT(
- m_commandBuffer, static_cast<VkDepthClampModeEXT>( depthClampMode ), reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) );
+ d.vkCmdSetDepthClampRangeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDepthClampModeEXT>( depthClampMode ),
+ reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -25720,7 +26046,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device,
+ return static_cast<Result>( d.vkCreatePipelineBinariesKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( pBinaries ) ) );
@@ -25975,7 +26301,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineBinaryKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26002,7 +26329,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineBinaryKHR( m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineBinaryKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26029,8 +26357,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineKeyKHR(
- m_device, reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ), reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) );
+ return static_cast<Result>( d.vkGetPipelineKeyKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ),
+ reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
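A sketch for Device::getPipelineKeyKHR above. As I read VK_KHR_pipeline_binary, passing a null create info requests the implementation's global key; treat that detail as an assumption to check against the spec. `device` is a placeholder:

    // Illustrative sketch only -- query the global pipeline-binary key.
    vk::PipelineBinaryKeyKHR globalKey;
    vk::Result result = device.getPipelineKeyKHR( nullptr, &globalKey );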
@@ -26062,7 +26391,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device,
+ return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( pInfo ),
reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineBinaryKey ),
pPipelineBinaryDataSize,
@@ -26152,8 +26481,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR(
- m_device, reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) );
+ return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26183,8 +26513,10 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
- m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+ static_cast<VkFramebuffer>( framebuffer ),
+ pPropertiesCount,
+ reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26263,8 +26595,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
- m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ),
+ reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
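The QCOM tile-property queries above use the standard count-then-fill enumeration. A sketch with placeholder `device` / `framebuffer` and <vector> assumed available:

    // Illustrative sketch only -- enumerate tile properties of a framebuffer.
    uint32_t tileCount = 0;
    vk::Result result = device.getFramebufferTilePropertiesQCOM( framebuffer, &tileCount, nullptr );
    std::vector<vk::TilePropertiesQCOM> tileProperties( tileCount );
    result = device.getFramebufferTilePropertiesQCOM( framebuffer, &tileCount, tileProperties.data() );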
@@ -26293,8 +26626,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) );
+ return static_cast<Result>( d.vkSetLatencySleepModeNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26322,8 +26655,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) );
+ return static_cast<Result>( d.vkLatencySleepNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26347,7 +26680,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
+ d.vkSetLatencyMarkerNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26371,7 +26705,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
+ d.vkGetLatencyTimingsNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
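A sketch for the VK_NV_low_latency2 entry points above, assuming the VkLatencySleepModeInfoNV layout (lowLatencyMode / lowLatencyBoost / minimumIntervalUs) and placeholder `device` / `swapchain` handles:

    // Illustrative sketch only -- opt a swapchain into low-latency mode.
    vk::LatencySleepModeInfoNV sleepModeInfo{};
    sleepModeInfo.lowLatencyMode    = VK_TRUE;
    sleepModeInfo.lowLatencyBoost   = VK_TRUE;
    sleepModeInfo.minimumIntervalUs = 0;
    vk::Result result = device.setLatencySleepModeNV( swapchain, &sleepModeInfo );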
@@ -26425,7 +26760,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) );
+ d.vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26450,7 +26785,7 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26534,7 +26869,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) );
+ d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageAspectFlags>( aspectMask ) );
}
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
@@ -26546,7 +26881,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetScreenBufferPropertiesQNX( static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26594,7 +26930,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetLineStippleKHR( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+ d.vkCmdSetLineStippleKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
}
//=== VK_KHR_calibrated_timestamps ===
@@ -26605,8 +26941,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26685,8 +27021,11 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetCalibratedTimestampsKHR(
- m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
+ return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ),
+ timestampCount,
+ reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ),
+ pTimestamps,
+ pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
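A sketch for Device::getCalibratedTimestampsKHR above, reading a single device-domain timestamp; `device` is a placeholder and error handling is omitted:

    // Illustrative sketch only -- one calibrated timestamp plus its max deviation.
    vk::CalibratedTimestampInfoKHR timestampInfo{};
    timestampInfo.timeDomain = vk::TimeDomainKHR::eDevice;
    uint64_t timestamp    = 0;
    uint64_t maxDeviation = 0;
    vk::Result result = device.getCalibratedTimestampsKHR( 1, &timestampInfo, &timestamp, &maxDeviation );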
@@ -26765,7 +27104,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) );
+ d.vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26787,7 +27127,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) );
+ d.vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
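CommandBuffer::pushConstants2KHR above moves every vkCmdPushConstants parameter into a single VkPushConstantsInfoKHR. A sketch with placeholder `cmd` / `pipelineLayout`:

    // Illustrative sketch only -- push four floats to the fragment stage.
    float constants[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
    vk::PushConstantsInfoKHR pushConstantsInfo{};
    pushConstantsInfo.layout     = pipelineLayout;
    pushConstantsInfo.stageFlags = vk::ShaderStageFlagBits::eFragment;
    pushConstantsInfo.offset     = 0;
    pushConstantsInfo.size       = sizeof( constants );
    pushConstantsInfo.pValues    = constants;
    cmd.pushConstants2KHR( &pushConstantsInfo );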
@@ -26809,7 +27149,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) );
+ d.vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26832,7 +27173,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer,
+ d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) );
}
@@ -26858,7 +27199,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) );
+ d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26883,7 +27225,8 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
- m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) );
+ static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26911,7 +27254,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device,
+ d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( pInfo ),
reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
@@ -26963,8 +27306,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPreprocessGeneratedCommandsEXT(
- m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ), static_cast<VkCommandBuffer>( stateCommandBuffer ) );
+ d.vkCmdPreprocessGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ),
+ static_cast<VkCommandBuffer>( stateCommandBuffer ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -26989,8 +27333,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteGeneratedCommandsEXT(
- m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) );
+ d.vkCmdExecuteGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( isPreprocessed ),
+ reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
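The VK_EXT_device_generated_commands hunks above pair an optional explicit preprocessing step with execution. A sketch of the call shape only, assuming `generatedCommandsInfo`, `stateCmd`, and `cmd` have been set up per the extension (all placeholders, not from the diff):

    // Illustrative sketch only -- explicit preprocessing, then execution.
    cmd.preprocessGeneratedCommandsEXT( &generatedCommandsInfo, stateCmd );
    // ... later, inside the rendering or dispatch scope:
    cmd.executeGeneratedCommandsEXT( VK_TRUE /* isPreprocessed */, &generatedCommandsInfo );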
@@ -27017,7 +27362,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( m_device,
+ return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkIndirectCommandsLayoutEXT *>( pIndirectCommandsLayout ) ) );
@@ -27079,8 +27424,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutEXT(
- m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -27107,8 +27453,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutEXT(
- m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -27137,7 +27484,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( m_device,
+ return static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkIndirectExecutionSetEXT *>( pIndirectExecutionSet ) ) );
@@ -27199,8 +27546,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectExecutionSetEXT(
- m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -27227,8 +27575,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectExecutionSetEXT(
- m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -27256,7 +27605,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateIndirectExecutionSetPipelineEXT( m_device,
+ d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ),
executionSetWriteCount,
reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( pExecutionSetWrites ) );
@@ -27289,7 +27638,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateIndirectExecutionSetShaderEXT( m_device,
+ d.vkUpdateIndirectExecutionSetShaderEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ),
executionSetWriteCount,
reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( pExecutionSetWrites ) );