author    Jon Leech <[email protected]>  2023-10-20 03:59:17 -0700
committer Jon Leech <[email protected]>  2023-10-20 04:02:25 -0700
commit    374f9fd97520f6dd1b80745de09208d878ab4a52 (patch)
tree      090c57a188d4909c09d19270bacf8a6deee4b90e /include
parent    f4bfcd885214675a6a0d7d4df07f52b511e6ea16 (diff)
Update for Vulkan-Docs 1.3.269 (tag: v1.3.269)
Diffstat (limited to 'include')
-rw-r--r--  include/vulkan/vulkan.cppm                       48
-rw-r--r--  include/vulkan/vulkan.hpp                       181
-rw-r--r--  include/vulkan/vulkan_core.h                    136
-rw-r--r--  include/vulkan/vulkan_enums.hpp                  42
-rw-r--r--  include/vulkan/vulkan_extension_inspection.hpp   34
-rw-r--r--  include/vulkan/vulkan_funcs.hpp                 298
-rw-r--r--  include/vulkan/vulkan_handles.hpp               336
-rw-r--r--  include/vulkan/vulkan_hash.hpp                  158
-rw-r--r--  include/vulkan/vulkan_raii.hpp                  350
-rw-r--r--  include/vulkan/vulkan_shared.hpp                 70
-rw-r--r--  include/vulkan/vulkan_static_assertions.hpp      67
-rw-r--r--  include/vulkan/vulkan_structs.hpp               982
-rw-r--r--  include/vulkan/vulkan_to_string.hpp              43
13 files changed, 2698 insertions(+), 47 deletions(-)
diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm
index 9a2cad5..a8dfe78 100644
--- a/include/vulkan/vulkan.cppm
+++ b/include/vulkan/vulkan.cppm
@@ -784,6 +784,10 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ //=== VK_ARM_scheduling_controls ===
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagBitsARM;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM;
+
//=== VK_NV_memory_decompression ===
using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV;
using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV;
@@ -2260,6 +2264,15 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ using VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV;
+ using VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV;
+ using VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_NV_low_latency ===
using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV;
@@ -2517,6 +2530,11 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_ARM_shader_core_properties ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM;
+ //=== VK_ARM_scheduling_controls ===
+ using VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM;
+ using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM;
+
//=== VK_EXT_image_sliced_view_of_3d ===
using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
@@ -2816,6 +2834,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ using VULKAN_HPP_NAMESPACE::CudaFunctionNV;
+ using VULKAN_HPP_NAMESPACE::CudaModuleNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
@@ -2874,6 +2898,9 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_swapchain ===
using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR;
+ //=== VK_KHR_display ===
+ using VULKAN_HPP_NAMESPACE::UniqueDisplayKHR;
+
//=== VK_EXT_debug_report ===
using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT;
@@ -2897,12 +2924,21 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_ray_tracing ===
using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV;
+ //=== VK_INTEL_performance_query ===
+ using VULKAN_HPP_NAMESPACE::UniquePerformanceConfigurationINTEL;
+
//=== VK_KHR_deferred_host_operations ===
using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR;
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ using VULKAN_HPP_NAMESPACE::UniqueCudaFunctionNV;
+ using VULKAN_HPP_NAMESPACE::UniqueCudaModuleNV;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA;
@@ -3001,6 +3037,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ using VULKAN_HPP_NAMESPACE::SharedCudaFunctionNV;
+ using VULKAN_HPP_NAMESPACE::SharedCudaModuleNV;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA;
@@ -3175,6 +3217,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ using VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV;
+ using VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA;
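
The vulkan.cppm changes above only add re-exports, so a consumer picks the new names up through the module interface with no extra includes. A minimal sketch, assuming the build compiles include/vulkan/vulkan.cppm as the named module vulkan_hpp and defines VK_ENABLE_BETA_EXTENSIONS where the CUDA types are wanted:

    import vulkan_hpp;

    int main()
    {
      // Re-exported by this change: the VK_ARM_scheduling_controls flag types.
      vk::PhysicalDeviceSchedulingControlsFlagsARM flags =
        vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount;
      return flags ? 0 : 1;  // Flags' explicit operator bool applies contextually
    }
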
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 37f7227..058c304 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -55,7 +55,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 268, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 269, "Wrong VK_HEADER_VERSION!" );
// <tuple> includes <sys/sysmacros.h> through some other header
// this results in major(x) being resolved to gnu_dev_major(x)
@@ -94,6 +94,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {}
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT
+ {
+ copy( data.data(), data.length() );
+ }
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+ template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+ VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT
+ {
+ copy( data.data(), data.length() );
+ }
+#endif
+
#if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
// on 32 bit compiles, needs overloads on index type int to resolve ambiguities
VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT
@@ -120,14 +134,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string() const
{
- return std::string( this->data() );
+ return std::string( this->data(), N );
}
#if 17 <= VULKAN_HPP_CPP_VERSION
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string_view() const
{
- return std::string_view( this->data() );
+ return std::string_view( this->data(), N );
}
#endif
@@ -174,6 +188,20 @@ namespace VULKAN_HPP_NAMESPACE
{
return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
}
+
+ private:
+ VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT
+ {
+ size_t n = std::min( N, len );
+ for ( size_t i = 0; i < n; ++i )
+ {
+ ( *this )[i] = data[i];
+ }
+ for ( size_t i = n; i < N; ++i )
+ {
+ ( *this )[i] = 0;
+ }
+ }
};
// specialization of relational operators between std::string and arrays of chars
@@ -4749,6 +4777,46 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ VkResult vkCreateCudaModuleNV( VkDevice device,
+ const VkCudaModuleCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule );
+ }
+
+ VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData );
+ }
+
+ VkResult vkCreateCudaFunctionNV( VkDevice device,
+ const VkCudaFunctionCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction );
+ }
+
+ void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyCudaModuleNV( device, module, pAllocator );
+ }
+
+ void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyCudaFunctionNV( device, function, pAllocator );
+ }
+
+ void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo );
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
@@ -10965,6 +11033,34 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ template <>
+ struct StructExtends<PhysicalDeviceCudaKernelLaunchFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceCudaKernelLaunchFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceCudaKernelLaunchPropertiesNV, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_NV_low_latency ===
template <>
struct StructExtends<QueryLowLatencySupportNV, SemaphoreCreateInfo>
@@ -12275,6 +12371,48 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_ARM_scheduling_controls ===
+ template <>
+ struct StructExtends<DeviceQueueShaderCoreControlCreateInfoARM, DeviceQueueCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<DeviceQueueShaderCoreControlCreateInfoARM, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceSchedulingControlsFeaturesARM, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceSchedulingControlsFeaturesARM, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceSchedulingControlsPropertiesARM, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_image_sliced_view_of_3d ===
template <>
struct StructExtends<PhysicalDeviceImageSlicedViewOf3DFeaturesEXT, PhysicalDeviceFeatures2>
@@ -14436,6 +14574,23 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
+ PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
+ PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
+ PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
+ PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
+ PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
+#else
+ PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
+ PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
+ PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
@@ -15760,6 +15915,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) );
+ vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) );
+ vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) );
+ vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) );
+ vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) );
+ vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
@@ -16801,6 +16966,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) );
+ vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) );
+ vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) );
+ vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) );
+ vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) );
+ vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
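
The ArrayWrapper1D changes are behavioral as well as additive: the new constructors copy at most N characters from a std::string or std::string_view and zero-fill the rest, while the std::string/std::string_view conversions now cover the full extent N instead of stopping at the first NUL. A small sketch of the observable behavior, assuming the default vk namespace:

    #include <string>
    #include <vulkan/vulkan.hpp>

    void arrayWrapperRoundTrip()
    {
      // New constructor: truncates to N and zero-pads via the private copy() helper.
      vk::ArrayWrapper1D<char, 8> name( std::string( "main" ) );

      // Changed conversion: the result now has length 8, trailing NULs included.
      std::string round = name;
    }
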
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index 77bdf09..904ac6f 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 268
+#define VK_HEADER_VERSION 269
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -902,6 +902,11 @@ typedef enum VkStructureType {
#endif
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+ VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000,
+ VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001,
+ VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004,
VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000,
VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000,
VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001,
@@ -1035,6 +1040,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000,
+ VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000,
VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000,
@@ -1420,6 +1428,8 @@ typedef enum VkObjectType {
VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
+ VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000,
+ VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001,
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
@@ -10768,6 +10778,8 @@ typedef enum VkDebugReportObjectTypeEXT {
VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001,
VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000,
VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
+ VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000,
+ VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001,
VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000,
VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
@@ -14917,6 +14929,98 @@ typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
+// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls.
+#define VK_NV_cuda_kernel_launch 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV)
+#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2
+#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch"
+typedef struct VkCudaModuleCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ size_t dataSize;
+ const void* pData;
+} VkCudaModuleCreateInfoNV;
+
+typedef struct VkCudaFunctionCreateInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkCudaModuleNV module;
+ const char* pName;
+} VkCudaFunctionCreateInfoNV;
+
+typedef struct VkCudaLaunchInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkCudaFunctionNV function;
+ uint32_t gridDimX;
+ uint32_t gridDimY;
+ uint32_t gridDimZ;
+ uint32_t blockDimX;
+ uint32_t blockDimY;
+ uint32_t blockDimZ;
+ uint32_t sharedMemBytes;
+ size_t paramCount;
+ const void* const * pParams;
+ size_t extraCount;
+ const void* const * pExtras;
+} VkCudaLaunchInfoNV;
+
+typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 cudaKernelLaunchFeatures;
+} VkPhysicalDeviceCudaKernelLaunchFeaturesNV;
+
+typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t computeCapabilityMinor;
+ uint32_t computeCapabilityMajor;
+} VkPhysicalDeviceCudaKernelLaunchPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule);
+typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction);
+typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV(
+ VkDevice device,
+ const VkCudaModuleCreateInfoNV* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkCudaModuleNV* pModule);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV(
+ VkDevice device,
+ VkCudaModuleNV module,
+ size_t* pCacheSize,
+ void* pCacheData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV(
+ VkDevice device,
+ const VkCudaFunctionCreateInfoNV* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkCudaFunctionNV* pFunction);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV(
+ VkDevice device,
+ VkCudaModuleNV module,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV(
+ VkDevice device,
+ VkCudaFunctionNV function,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV(
+ VkCommandBuffer commandBuffer,
+ const VkCudaLaunchInfoNV* pLaunchInfo);
+#endif
+
+
// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_low_latency 1
#define VK_NV_LOW_LATENCY_SPEC_VERSION 1
@@ -16475,6 +16579,36 @@ typedef struct VkPhysicalDeviceShaderCorePropertiesARM {
+// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls.
+#define VK_ARM_scheduling_controls 1
+#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1
+#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls"
+typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM;
+
+typedef enum VkPhysicalDeviceSchedulingControlsFlagBitsARM {
+ VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001,
+ VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FLAG_BITS_MAX_ENUM_ARM = 0x7FFFFFFF
+} VkPhysicalDeviceSchedulingControlsFlagBitsARM;
+typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t shaderCoreCount;
+} VkDeviceQueueShaderCoreControlCreateInfoARM;
+
+typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 schedulingControls;
+} VkPhysicalDeviceSchedulingControlsFeaturesARM;
+
+typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM {
+ VkStructureType sType;
+ void* pNext;
+ VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags;
+} VkPhysicalDeviceSchedulingControlsPropertiesARM;
+
+
+
// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_image_sliced_view_of_3d 1
#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1
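
The new C entry points follow the familiar create/bind/record pattern: build a module from an opaque blob, resolve a function by name, then record the launch with vkCmdCudaLaunchKernelNV. A hedged end-to-end sketch in C++ against the C declarations above; the device, command buffer, blob, and the kernel name "myKernel" are placeholder inputs, VK_ENABLE_BETA_EXTENSIONS must be defined, and cleanup is reduced to early returns:

    #include <vulkan/vulkan.h>

    VkResult launchCudaKernel( VkDevice device, VkCommandBuffer cmd,
                               const void * blob, size_t blobSize )
    {
      VkCudaModuleCreateInfoNV moduleInfo{};
      moduleInfo.sType    = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV;
      moduleInfo.dataSize = blobSize;
      moduleInfo.pData    = blob;
      VkCudaModuleNV module = VK_NULL_HANDLE;
      VkResult result = vkCreateCudaModuleNV( device, &moduleInfo, nullptr, &module );
      if ( result != VK_SUCCESS )
        return result;

      VkCudaFunctionCreateInfoNV functionInfo{};
      functionInfo.sType  = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV;
      functionInfo.module = module;
      functionInfo.pName  = "myKernel";  // hypothetical entry point name
      VkCudaFunctionNV function = VK_NULL_HANDLE;
      result = vkCreateCudaFunctionNV( device, &functionInfo, nullptr, &function );
      if ( result != VK_SUCCESS )
      {
        vkDestroyCudaModuleNV( device, module, nullptr );
        return result;
      }

      VkCudaLaunchInfoNV launchInfo{};
      launchInfo.sType     = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV;
      launchInfo.function  = function;
      launchInfo.gridDimX  = 1;
      launchInfo.gridDimY  = 1;
      launchInfo.gridDimZ  = 1;
      launchInfo.blockDimX = 64;
      launchInfo.blockDimY = 1;
      launchInfo.blockDimZ = 1;
      vkCmdCudaLaunchKernelNV( cmd, &launchInfo );

      // The module and function must outlive execution of the command buffer;
      // their destruction is elided here for brevity.
      return VK_SUCCESS;
    }
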
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
index d243cd4..d928695 100644
--- a/include/vulkan/vulkan_enums.hpp
+++ b/include/vulkan/vulkan_enums.hpp
@@ -1089,7 +1089,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
- eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eCudaModuleCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV,
+ eCudaFunctionCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV,
+ eCudaLaunchInfoNV = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV,
+ ePhysicalDeviceCudaKernelLaunchFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV,
+ ePhysicalDeviceCudaKernelLaunchPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV,
#if defined( VK_USE_PLATFORM_METAL_EXT )
eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT,
eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT,
@@ -1260,6 +1267,9 @@ namespace VULKAN_HPP_NAMESPACE
eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM,
+ eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM,
+ ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM,
+ ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM,
ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT,
eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT,
ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE,
@@ -1430,6 +1440,10 @@ namespace VULKAN_HPP_NAMESPACE
eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eCudaModuleNV = VK_OBJECT_TYPE_CUDA_MODULE_NV,
+ eCudaFunctionNV = VK_OBJECT_TYPE_CUDA_FUNCTION_NV,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -4660,6 +4674,10 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eCudaModuleNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV,
+ eCudaFunctionNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -6753,6 +6771,22 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ //=== VK_ARM_scheduling_controls ===
+
+ enum class PhysicalDeviceSchedulingControlsFlagBitsARM : VkPhysicalDeviceSchedulingControlsFlagsARM
+ {
+ eShaderCoreCount = VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM
+ };
+
+ using PhysicalDeviceSchedulingControlsFlagsARM = Flags<PhysicalDeviceSchedulingControlsFlagBitsARM>;
+
+ template <>
+ struct FlagTraits<PhysicalDeviceSchedulingControlsFlagBitsARM>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR PhysicalDeviceSchedulingControlsFlagsARM allFlags = PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount;
+ };
+
//=== VK_NV_memory_decompression ===
enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV
@@ -7289,6 +7323,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ case VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+ case VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
case VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
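
With the enum additions in place, support for the single defined scheduling-controls bit can be probed through the usual properties chain (the matching StructExtends specializations are added in vulkan.hpp above). A sketch, assuming a valid vk::PhysicalDevice:

    #include <vulkan/vulkan.hpp>

    bool supportsShaderCoreCountControl( vk::PhysicalDevice physicalDevice )
    {
      // Chain the new ARM properties struct onto PhysicalDeviceProperties2.
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                 vk::PhysicalDeviceSchedulingControlsPropertiesARM>();
      auto const & props = chain.get<vk::PhysicalDeviceSchedulingControlsPropertiesARM>();
      return static_cast<bool>( props.schedulingControlsFlags &
                                vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount );
    }
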
diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp
index d010468..f3fef6d 100644
--- a/include/vulkan/vulkan_extension_inspection.hpp
+++ b/include/vulkan/vulkan_extension_inspection.hpp
@@ -297,7 +297,10 @@ namespace VULKAN_HPP_NAMESPACE
"VK_KHR_video_encode_queue",
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
"VK_NV_device_diagnostics_config",
-"VK_QCOM_render_pass_store_ops",
+"VK_QCOM_render_pass_store_ops",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_NV_cuda_kernel_launch",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
"VK_NV_low_latency",
#if defined( VK_USE_PLATFORM_METAL_EXT )
"VK_EXT_metal_objects",
@@ -364,6 +367,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_EXT_pageable_device_local_memory",
"VK_KHR_maintenance4",
"VK_ARM_shader_core_properties",
+"VK_ARM_scheduling_controls",
"VK_EXT_image_sliced_view_of_3d",
"VK_VALVE_descriptor_set_host_mapping",
"VK_EXT_depth_clamp_zero_one",
@@ -773,6 +777,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_EXT_pageable_device_local_memory", { { "VK_VERSION_1_0", { { "VK_EXT_memory_priority", } } } } },
{ "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { { } } } } },
{ "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { { } } } } },
+{ "VK_ARM_scheduling_controls", { { "VK_VERSION_1_0", { { "VK_ARM_shader_core_builtins", } } } } },
{ "VK_EXT_image_sliced_view_of_3d", { { "VK_VERSION_1_0", { { "VK_KHR_maintenance1", "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_VALVE_descriptor_set_host_mapping", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_EXT_depth_clamp_zero_one", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
@@ -1480,7 +1485,11 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|| ( extension == "VK_KHR_video_encode_queue" )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- || ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" ) || ( extension == "VK_NV_low_latency" )
+ || ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ || ( extension == "VK_NV_cuda_kernel_launch" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ || ( extension == "VK_NV_low_latency" )
#if defined( VK_USE_PLATFORM_METAL_EXT )
|| ( extension == "VK_EXT_metal_objects" )
#endif /*VK_USE_PLATFORM_METAL_EXT*/
@@ -1517,16 +1526,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|| ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) ||
( extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) ||
- ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) ||
- ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) ||
- ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_QCOM_fragment_density_map_offset" ) ||
- ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) ||
- ( extension == "VK_NV_device_generated_commands_compute" ) || ( extension == "VK_NV_linear_color_attachment" ) ||
- ( extension == "VK_EXT_image_compression_control_swapchain" ) || ( extension == "VK_QCOM_image_processing" ) ||
- ( extension == "VK_EXT_nested_command_buffer" ) || ( extension == "VK_EXT_external_memory_acquire_unmodified" ) ||
- ( extension == "VK_EXT_extended_dynamic_state3" ) || ( extension == "VK_EXT_subpass_merge_feedback" ) ||
- ( extension == "VK_EXT_shader_module_identifier" ) || ( extension == "VK_EXT_rasterization_order_attachment_access" ) ||
- ( extension == "VK_NV_optical_flow" ) || ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" )
+ ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_ARM_scheduling_controls" ) ||
+ ( extension == "VK_EXT_image_sliced_view_of_3d" ) || ( extension == "VK_VALVE_descriptor_set_host_mapping" ) ||
+ ( extension == "VK_EXT_depth_clamp_zero_one" ) || ( extension == "VK_EXT_non_seamless_cube_map" ) ||
+ ( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_NV_copy_memory_indirect" ) ||
+ ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_device_generated_commands_compute" ) ||
+ ( extension == "VK_NV_linear_color_attachment" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) ||
+ ( extension == "VK_QCOM_image_processing" ) || ( extension == "VK_EXT_nested_command_buffer" ) ||
+ ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) ||
+ ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) ||
+ ( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) ||
+ ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" )
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|| ( extension == "VK_ANDROID_external_format_resolve" )
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
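
The inspection data above feeds this header's query helpers, so the new names can be tested like any other extension string. A sketch using isDeviceExtension from this header; note that "VK_NV_cuda_kernel_launch" is only recognized when VK_ENABLE_BETA_EXTENSIONS is defined, matching the guards above:

    #include <vulkan/vulkan_extension_inspection.hpp>

    bool newExtensionsKnown()
    {
      return vk::isDeviceExtension( "VK_ARM_scheduling_controls" ) &&
             vk::isDeviceExtension( "VK_NV_cuda_kernel_launch" );
    }
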
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index 32715f8..ae420ff 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -19210,6 +19210,304 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
+ reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
+ Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::CudaModuleNV module;
+ VkResult result =
+ d.vkCreateCudaModuleNV( m_device,
+ reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCudaModuleNV *>( &module ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
+ Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::CudaModuleNV module;
+ VkResult result =
+ d.vkCreateCudaModuleNV( m_device,
+ reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCudaModuleNV *>( &module ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ size_t * pCacheSize,
+ void * pCacheData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Uint8_tAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<uint8_t, Uint8_tAllocator> cacheData;
+ size_t cacheSize;
+ VkResult result;
+ do
+ {
+ result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && cacheSize )
+ {
+ cacheData.resize( cacheSize );
+ result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
+ VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
+ if ( cacheSize < cacheData.size() )
+ {
+ cacheData.resize( cacheSize );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), cacheData );
+ }
+
+ template <typename Uint8_tAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator );
+ size_t cacheSize;
+ VkResult result;
+ do
+ {
+ result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && cacheSize )
+ {
+ cacheData.resize( cacheSize );
+ result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
+ VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
+ if ( cacheSize < cacheData.size() )
+ {
+ cacheData.resize( cacheSize );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), cacheData );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
+ reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
+ Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
+ VkResult result =
+ d.vkCreateCudaFunctionNV( m_device,
+ reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCudaFunctionNV *>( &function ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
+ Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
+ VkResult result =
+ d.vkCreateCudaFunctionNV( m_device,
+ reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCudaFunctionNV *>( &function ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyCudaModuleNV( m_device,
+ static_cast<VkCudaModuleNV>( module ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyCudaModuleNV( m_device,
+ static_cast<VkCudaModuleNV>( module ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyCudaFunctionNV( m_device,
+ static_cast<VkCudaFunctionNV>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyCudaFunctionNV( m_device,
+ static_cast<VkCudaFunctionNV>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
+ }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
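
The enhanced getCudaModuleCacheNV wrapper above implements the standard two-call size-query idiom, looping while the fill call returns VK_INCOMPLETE and trimming the vector to the final size, so calling it is a one-liner. A sketch assuming VK_ENABLE_BETA_EXTENSIONS, the default exception-throwing configuration, and valid handles:

    #include <vector>
    #include <vulkan/vulkan.hpp>

    std::vector<uint8_t> fetchCudaModuleCache( vk::Device device, vk::CudaModuleNV module )
    {
      // Performs the query/resize/fill loop shown above and throws on failure.
      return device.getCudaModuleCacheNV( module );
    }
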
diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp
index 3325616..526d99c 100644
--- a/include/vulkan/vulkan_handles.hpp
+++ b/include/vulkan/vulkan_handles.hpp
@@ -1261,6 +1261,15 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceDiagnosticsConfigFeaturesNV;
struct DeviceDiagnosticsConfigCreateInfoNV;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ struct CudaModuleCreateInfoNV;
+ struct CudaFunctionCreateInfoNV;
+ struct CudaLaunchInfoNV;
+ struct PhysicalDeviceCudaKernelLaunchFeaturesNV;
+ struct PhysicalDeviceCudaKernelLaunchPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_NV_low_latency ===
struct QueryLowLatencySupportNV;
@@ -1518,6 +1527,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_ARM_shader_core_properties ===
struct PhysicalDeviceShaderCorePropertiesARM;
+ //=== VK_ARM_scheduling_controls ===
+ struct DeviceQueueShaderCoreControlCreateInfoARM;
+ struct PhysicalDeviceSchedulingControlsFeaturesARM;
+ struct PhysicalDeviceSchedulingControlsPropertiesARM;
+
//=== VK_EXT_image_sliced_view_of_3d ===
struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
struct ImageViewSlicedCreateInfoEXT;
@@ -1815,6 +1829,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ class CudaModuleNV;
+ class CudaFunctionNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
@@ -2041,6 +2061,15 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ //=== VK_KHR_display ===
+ template <typename Dispatch>
+ class UniqueHandleTraits<DisplayKHR, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<PhysicalDevice, Dispatch>;
+ };
+ using UniqueDisplayKHR = UniqueHandle<DisplayKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+
//=== VK_EXT_debug_report ===
template <typename Dispatch>
class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch>
@@ -2118,6 +2147,15 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ //=== VK_INTEL_performance_query ===
+ template <typename Dispatch>
+ class UniqueHandleTraits<PerformanceConfigurationINTEL, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniquePerformanceConfigurationINTEL = UniqueHandle<PerformanceConfigurationINTEL, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+
//=== VK_KHR_deferred_host_operations ===
template <typename Dispatch>
class UniqueHandleTraits<DeferredOperationKHR, Dispatch>
@@ -2136,6 +2174,24 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ template <typename Dispatch>
+ class UniqueHandleTraits<CudaModuleNV, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueCudaModuleNV = UniqueHandle<CudaModuleNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch>
+ class UniqueHandleTraits<CudaFunctionNV, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueCudaFunctionNV = UniqueHandle<CudaFunctionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <typename Dispatch>
@@ -5686,6 +5742,19 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_KHR_synchronization2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -7098,6 +7167,168 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class CudaFunctionNV
+ {
+ public:
+ using CType = VkCudaFunctionNV;
+ using NativeType = VkCudaFunctionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR CudaFunctionNV() = default;
+ VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT CudaFunctionNV( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( cudaFunctionNV ) {}
+
+# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ CudaFunctionNV & operator=( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT
+ {
+ m_cudaFunctionNV = cudaFunctionNV;
+ return *this;
+ }
+# endif
+
+ CudaFunctionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_cudaFunctionNV = {};
+ return *this;
+ }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CudaFunctionNV const & ) const = default;
+# else
+ bool operator==( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV == rhs.m_cudaFunctionNV;
+ }
+
+ bool operator!=( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV != rhs.m_cudaFunctionNV;
+ }
+
+ bool operator<( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV < rhs.m_cudaFunctionNV;
+ }
+# endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaFunctionNV == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCudaFunctionNV m_cudaFunctionNV = {};
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class CudaModuleNV
+ {
+ public:
+ using CType = VkCudaModuleNV;
+ using NativeType = VkCudaModuleNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR CudaModuleNV() = default;
+ VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT CudaModuleNV( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( cudaModuleNV ) {}
+
+# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ CudaModuleNV & operator=( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT
+ {
+ m_cudaModuleNV = cudaModuleNV;
+ return *this;
+ }
+# endif
+
+ CudaModuleNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_cudaModuleNV = {};
+ return *this;
+ }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CudaModuleNV const & ) const = default;
+# else
+ bool operator==( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV == rhs.m_cudaModuleNV;
+ }
+
+ bool operator!=( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV != rhs.m_cudaModuleNV;
+ }
+
+ bool operator<( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV < rhs.m_cudaModuleNV;
+ }
+# endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_cudaModuleNV == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCudaModuleNV m_cudaModuleNV = {};
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CudaModuleNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
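
The wrappers above are interchangeable with the raw C handles. A minimal sketch of the conversions they provide, assuming VK_ENABLE_BETA_EXTENSIONS is defined before including the header and a VkCudaModuleNV obtained elsewhere (function and variable names here are placeholders):

    #include <vulkan/vulkan.hpp>

    void inspect( VkCudaModuleNV raw )
    {
      vk::CudaModuleNV wrapped( raw );        // wraps the C handle at zero cost
      if ( wrapped )                          // explicit bool: non-null test
      {
        VkCudaModuleNV back = static_cast<VkCudaModuleNV>( wrapped );  // converts back losslessly
        (void)back;
      }
      wrapped = nullptr;                      // resets to a null handle
    }
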
class DescriptorPool
{
public:
@@ -12379,6 +12610,111 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
+ createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
+ createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ size_t * pCacheSize,
+ void * pCacheData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(
+ VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
+ createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
+ createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
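
In enhanced mode the declarations above resolve to the usual exception-throwing flavors, so a module/function pair can be created and torn down in a few lines. A sketch, assuming a vk::Device created with VK_NV_cuda_kernel_launch enabled and a compiled CUDA binary in blob; all names except the API calls are placeholders:

    vk::CudaModuleNV   cudaModule   = device.createCudaModuleNV( vk::CudaModuleCreateInfoNV( blob.size(), blob.data() ) );
    vk::CudaFunctionNV cudaFunction = device.createCudaFunctionNV( vk::CudaFunctionCreateInfoNV( cudaModule, "myKernel" ) );
    std::vector<uint8_t> cache = device.getCudaModuleCacheNV( cudaModule );  // driver-compiled blob, reusable across runs
    device.destroyCudaFunctionNV( cudaFunction );
    device.destroyCudaModuleNV( cudaModule );
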
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp
index fa35d2e..eefe419 100644
--- a/include/vulkan/vulkan_hash.hpp
+++ b/include/vulkan/vulkan_hash.hpp
@@ -457,6 +457,28 @@ namespace std
}
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CudaModuleNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleNV const & cudaModuleNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkCudaModuleNV>{}( static_cast<VkCudaModuleNV>( cudaModuleNV ) );
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CudaFunctionNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionNV const & cudaFunctionNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkCudaFunctionNV>{}( static_cast<VkCudaFunctionNV>( cudaFunctionNV ) );
+ }
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
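
These specializations, together with the handles' operator==, make the new types usable as keys in unordered containers. A sketch, assuming a created vk::CudaFunctionNV cudaFunction:

    #include <string>
    #include <unordered_map>

    std::unordered_map<vk::CudaFunctionNV, std::string> kernelNames;
    kernelNames.emplace( cudaFunction, "myKernel" );  // hashes the underlying VkCudaFunctionNV value
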
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -2732,6 +2754,67 @@ namespace std
}
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & cudaFunctionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.module );
+ for ( const char * p = cudaFunctionCreateInfoNV.pName; *p != '\0'; ++p )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, *p );
+ }
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const & cudaLaunchInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.function );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimX );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimY );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimZ );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimX );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimY );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimZ );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sharedMemBytes );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.paramCount );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pParams );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.extraCount );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pExtras );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & cudaModuleCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.dataSize );
+ VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pData );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
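
Note that the CudaFunctionCreateInfoNV specialization hashes pName character by character, matching the strcmp-based operator== of the struct: two create-infos naming the same kernel through different pointers compare equal and hash equally. A sketch, assuming a vk::CudaModuleNV cudaModule:

    #include <cassert>
    #include <string>

    std::string                  name = "myKernel";
    vk::CudaFunctionCreateInfoNV a( cudaModule, "myKernel" );
    vk::CudaFunctionCreateInfoNV b( cudaModule, name.c_str() );  // same contents, distinct pointer
    assert( std::hash<vk::CudaFunctionCreateInfoNV>{}( a ) == std::hash<vk::CudaFunctionCreateInfoNV>{}( b ) );
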
# if defined( VK_USE_PLATFORM_WIN32_KHR )
template <>
struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
@@ -3902,6 +3985,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const & deviceQueueShaderCoreControlCreateInfoARM ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.shaderCoreCount );
+ return seed;
+ }
+ };
+
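
The struct itself (defined in vulkan_structs.hpp further down) carries a single shaderCoreCount and, per the VK_ARM_scheduling_controls extension, is chained into a queue's pNext at device-creation time. A sketch with an illustrative core count:

    vk::DeviceQueueShaderCoreControlCreateInfoARM coreControl( /* shaderCoreCount */ 4 );
    vk::DeviceQueueCreateInfo                     queueCreateInfo;
    queueCreateInfo.pNext = &coreControl;  // caps the shader cores used by this queue
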
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG ) const VULKAN_HPP_NOEXCEPT
@@ -7652,6 +7749,39 @@ namespace std
}
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const & physicalDeviceCudaKernelLaunchFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.cudaKernelLaunchFeatures );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const & physicalDeviceCudaKernelLaunchPropertiesNV ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMinor );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMajor );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>
{
@@ -10576,6 +10706,34 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const & physicalDeviceSchedulingControlsFeaturesARM ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.schedulingControls );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const & physicalDeviceSchedulingControlsPropertiesARM ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.schedulingControlsFlags );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & physicalDeviceSeparateDepthStencilLayoutsFeatures )
diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp
index f217921..99616d0 100644
--- a/include/vulkan/vulkan_raii.hpp
+++ b/include/vulkan/vulkan_raii.hpp
@@ -1394,6 +1394,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) );
+ vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) );
+ vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) );
+ vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) );
+ vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) );
+ vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
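
vkGetDeviceProcAddr returns null for these entry points unless the device enabled VK_NV_cuda_kernel_launch, so availability can be probed on the dispatcher before use. A sketch, assuming a vk::raii::Device device:

    if ( device.getDispatcher()->vkCreateCudaModuleNV )
    {
      // the VK_NV_cuda_kernel_launch entry points were resolved for this device
    }
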
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
@@ -2295,6 +2305,23 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
+ PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
+ PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
+ PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
+ PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
+ PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
+# else
+ PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
+ PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
+ PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
@@ -2612,6 +2639,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ class CudaModuleNV;
+ class CudaFunctionNV;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
@@ -4184,6 +4217,18 @@ namespace VULKAN_HPP_NAMESPACE
getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV
+ createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV
+ createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
@@ -5883,6 +5928,12 @@ namespace VULKAN_HPP_NAMESPACE
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
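
Recording the launch is a single call on a command buffer in the recording state. A sketch, assuming a vk::raii::CommandBuffer commandBuffer and a filled vk::CudaLaunchInfoNV launchInfo (see vulkan_structs.hpp below):

    commandBuffer.begin( {} );
    commandBuffer.cudaLaunchKernelNV( launchInfo );
    commandBuffer.end();
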
//=== VK_KHR_synchronization2 ===
void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -6422,6 +6473,254 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class CudaFunctionNV
+ {
+ public:
+ using CType = VkCudaFunctionNV;
+ using CppType = vk::CudaFunctionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateCudaFunctionNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkCudaFunctionNV *>( &m_function ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ detail::throwResultException( result, "vkCreateCudaFunctionNV" );
+ }
+ }
+
+ CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkCudaFunctionNV function,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_function( function )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ CudaFunctionNV( std::nullptr_t ) {}
+
+ ~CudaFunctionNV()
+ {
+ clear();
+ }
+
+ CudaFunctionNV() = delete;
+ CudaFunctionNV( CudaFunctionNV const & ) = delete;
+ CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete;
+ CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_function, rhs.m_function );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_function;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_function )
+ {
+ getDispatcher()->vkDestroyCudaFunctionNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_function = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV release()
+ {
+ m_device = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr );
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_function, rhs.m_function );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class CudaModuleNV
+ {
+ public:
+ using CType = VkCudaModuleNV;
+ using CppType = vk::CudaModuleNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateCudaModuleNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkCudaModuleNV *>( &m_module ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ detail::throwResultException( result, "vkCreateCudaModuleNV" );
+ }
+ }
+
+ CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkCudaModuleNV module,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_module( module )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ CudaModuleNV( std::nullptr_t ) {}
+
+ ~CudaModuleNV()
+ {
+ clear();
+ }
+
+ CudaModuleNV() = delete;
+ CudaModuleNV( CudaModuleNV const & ) = delete;
+ CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ CudaModuleNV & operator=( CudaModuleNV const & ) = delete;
+ CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_module, rhs.m_module );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_module;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_module )
+ {
+ getDispatcher()->vkDestroyCudaModuleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_module = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::CudaModuleNV release()
+ {
+ m_device = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr );
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_module, rhs.m_module );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ //=== VK_NV_cuda_kernel_launch ===
+
+ VULKAN_HPP_NODISCARD std::vector<uint8_t> getCache() const;
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
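
With the RAII wrappers above, creation throws on failure and destruction happens on scope exit, so no explicit destroy calls are needed. A sketch, assuming a vk::raii::Device device with the extension enabled and a compiled CUDA binary in blob:

    vk::raii::CudaModuleNV   cudaModule( device, vk::CudaModuleCreateInfoNV( blob.size(), blob.data() ) );
    vk::raii::CudaFunctionNV cudaFunction( device, vk::CudaFunctionCreateInfoNV( *cudaModule, "myKernel" ) );
    std::vector<uint8_t> cache = cudaModule.getCache();
    // both destructors invoke the matching vkDestroyCuda*NV automatically
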
class DebugReportCallbackEXT
{
public:
@@ -18901,6 +19200,57 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV
+ Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> CudaModuleNV::getCache() const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
+
+ std::vector<uint8_t> cacheData;
+ size_t cacheSize;
+ VkResult result;
+ do
+ {
+ result = getDispatcher()->vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && cacheSize )
+ {
+ cacheData.resize( cacheSize );
+ result = getDispatcher()->vkGetCudaModuleCacheNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" );
+ VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
+ if ( cacheSize < cacheData.size() )
+ {
+ cacheData.resize( cacheSize );
+ }
+ return cacheData;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV
+ Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" );
+
+ getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
+ }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
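
CudaModuleNV::getCache above wraps the standard Vulkan size-query protocol: a first call with a null buffer yields the size, a second call fills the buffer, and VK_INCOMPLETE triggers a retry in case the cache grew in between. The equivalent raw C sequence, assuming a VkDevice device, a VkCudaModuleNV module, an entry point fetched via vkGetDeviceProcAddr, and ignoring the retry for brevity:

    size_t size = 0;
    vkGetCudaModuleCacheNV( device, module, &size, nullptr );      // first call: query the size
    std::vector<uint8_t> data( size );
    vkGetCudaModuleCacheNV( device, module, &size, data.data() );  // second call: fill the buffer
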
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
diff --git a/include/vulkan/vulkan_shared.hpp b/include/vulkan/vulkan_shared.hpp
index 7b1bb23..f6f7116 100644
--- a/include/vulkan/vulkan_shared.hpp
+++ b/include/vulkan/vulkan_shared.hpp
@@ -654,6 +654,16 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedSwapchainKHR = SharedHandle<SwapchainKHR>;
+ //=== VK_KHR_display ===
+ template <>
+ class SharedHandleTraits<DisplayKHR>
+ {
+ public:
+ using DestructorType = PhysicalDevice;
+ using deleter = ObjectDestroyShared<DisplayKHR>;
+ };
+ using SharedDisplayKHR = SharedHandle<DisplayKHR>;
+
//=== VK_EXT_debug_report ===
template <>
class SharedHandleTraits<DebugReportCallbackEXT>
@@ -740,6 +750,16 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedAccelerationStructureNV = SharedHandle<AccelerationStructureNV>;
+ //=== VK_INTEL_performance_query ===
+ template <>
+ class SharedHandleTraits<PerformanceConfigurationINTEL>
+ {
+ public:
+ using DestructorType = Device;
+ using deleter = ObjectDestroyShared<PerformanceConfigurationINTEL>;
+ };
+ using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
+
//=== VK_KHR_deferred_host_operations ===
template <>
class SharedHandleTraits<DeferredOperationKHR>
@@ -760,6 +780,26 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedIndirectCommandsLayoutNV = SharedHandle<IndirectCommandsLayoutNV>;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_cuda_kernel_launch ===
+ template <>
+ class SharedHandleTraits<CudaModuleNV>
+ {
+ public:
+ using DestructorType = Device;
+ using deleter = ObjectDestroyShared<CudaModuleNV>;
+ };
+ using SharedCudaModuleNV = SharedHandle<CudaModuleNV>;
+ template <>
+ class SharedHandleTraits<CudaFunctionNV>
+ {
+ public:
+ using DestructorType = Device;
+ using deleter = ObjectDestroyShared<CudaFunctionNV>;
+ };
+ using SharedCudaFunctionNV = SharedHandle<CudaFunctionNV>;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
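
A SharedHandle couples the handle's lifetime to a reference count and keeps the owning device alive. A sketch, assuming a vk::SharedDevice sharedDevice and a created vk::CudaModuleNV cudaModule:

    vk::SharedCudaModuleNV shared( cudaModule, sharedDevice );  // takes ownership
    vk::SharedCudaModuleNV copy = shared;                       // the module is destroyed with the last copy
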
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <>
@@ -941,20 +981,6 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_display ===
template <>
- class SharedHandle<DisplayKHR> : public SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>
- {
- friend SharedHandleBase<DisplayKHR, SharedPhysicalDevice>;
-
- public:
- SharedHandle() = default;
- explicit SharedHandle( DisplayKHR handle, SharedPhysicalDevice parent ) noexcept
- : SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>( handle, std::move( parent ) )
- {
- }
- };
- using SharedDisplayKHR = SharedHandle<DisplayKHR>;
-
- template <>
class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
{
friend SharedHandleBase<DisplayModeKHR, SharedDisplayKHR>;
@@ -967,22 +993,6 @@ namespace VULKAN_HPP_NAMESPACE
}
};
using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
-
- //=== VK_INTEL_performance_query ===
-
- template <>
- class SharedHandle<PerformanceConfigurationINTEL> : public SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>
- {
- friend SharedHandleBase<PerformanceConfigurationINTEL, SharedDevice>;
-
- public:
- SharedHandle() = default;
- explicit SharedHandle( PerformanceConfigurationINTEL handle, SharedDevice parent ) noexcept
- : SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>( handle, std::move( parent ) )
- {
- }
- };
- using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
#endif // !VULKAN_HPP_NO_SMART_HANDLE
} // namespace VULKAN_HPP_NAMESPACE
#endif // VULKAN_SHARED_HPP
diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp
index a42e54d..349f387 100644
--- a/include/vulkan/vulkan_static_assertions.hpp
+++ b/include/vulkan/vulkan_static_assertions.hpp
@@ -5122,6 +5122,48 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDi
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
"DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_cuda_kernel_launch ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleNV ) == sizeof( VkCudaModuleNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleNV>::value, "CudaModuleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::value,
+ "CudaFunctionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV ) == sizeof( VkCudaModuleCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value,
+ "CudaModuleCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV ) == sizeof( VkCudaFunctionCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value,
+ "CudaFunctionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV ) == sizeof( VkCudaLaunchInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value,
+ "CudaLaunchInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value,
+ "PhysicalDeviceCudaKernelLaunchFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value,
+ "PhysicalDeviceCudaKernelLaunchPropertiesNV is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
//=== VK_NV_low_latency ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ),
@@ -6207,6 +6249,31 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
"PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" );
+//=== VK_ARM_scheduling_controls ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
+ "DeviceQueueShaderCoreControlCreateInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM ) ==
+ sizeof( VkPhysicalDeviceSchedulingControlsFeaturesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
+ "PhysicalDeviceSchedulingControlsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM ) ==
+ sizeof( VkPhysicalDeviceSchedulingControlsPropertiesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
+ "PhysicalDeviceSchedulingControlsPropertiesARM is not nothrow_move_constructible!" );
+
//=== VK_EXT_image_sliced_view_of_3d ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) ==
diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp
index b7bf645..2efa7af 100644
--- a/include/vulkan/vulkan_structs.hpp
+++ b/include/vulkan/vulkan_structs.hpp
@@ -19736,6 +19736,505 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CuModuleCreateInfoNVX;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct CudaFunctionCreateInfoNV
+ {
+ using NativeType = VkCudaFunctionCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ CudaFunctionCreateInfoNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , module( module_ )
+ , pName( pName_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CudaFunctionCreateInfoNV( *reinterpret_cast<CudaFunctionCreateInfoNV const *>( &rhs ) )
+ {
+ }
+
+ CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT
+ {
+ module = module_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pName = pName_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( this );
+ }
+
+ operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCudaFunctionCreateInfoNV *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CudaModuleNV const &, const char * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, module, pName );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+ return cmp;
+ if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+ return cmp;
+ if ( auto cmp = module <=> rhs.module; cmp != 0 )
+ return cmp;
+ if ( pName != rhs.pName )
+ if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
+ return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+ return std::strong_ordering::equivalent;
+ }
+# endif
+
+ bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
+ }
+
+ bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaFunctionCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::CudaModuleNV module = {};
+ const char * pName = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCudaFunctionCreateInfoNV>
+ {
+ using Type = CudaFunctionCreateInfoNV;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
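
The setters return *this, so a create-info can also be built fluently instead of through the constructor. A sketch, assuming a vk::CudaModuleNV cudaModule:

    auto createInfo = vk::CudaFunctionCreateInfoNV{}.setModule( cudaModule ).setPName( "myKernel" );
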
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct CudaLaunchInfoNV
+ {
+ using NativeType = VkCudaLaunchInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ = {},
+ uint32_t gridDimX_ = {},
+ uint32_t gridDimY_ = {},
+ uint32_t gridDimZ_ = {},
+ uint32_t blockDimX_ = {},
+ uint32_t blockDimY_ = {},
+ uint32_t blockDimZ_ = {},
+ uint32_t sharedMemBytes_ = {},
+ size_t paramCount_ = {},
+ const void * const * pParams_ = {},
+ size_t extraCount_ = {},
+ const void * const * pExtras_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , function( function_ )
+ , gridDimX( gridDimX_ )
+ , gridDimY( gridDimY_ )
+ , gridDimZ( gridDimZ_ )
+ , blockDimX( blockDimX_ )
+ , blockDimY( blockDimY_ )
+ , blockDimZ( blockDimZ_ )
+ , sharedMemBytes( sharedMemBytes_ )
+ , paramCount( paramCount_ )
+ , pParams( pParams_ )
+ , extraCount( extraCount_ )
+ , pExtras( pExtras_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast<CudaLaunchInfoNV const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_,
+ uint32_t gridDimX_,
+ uint32_t gridDimY_,
+ uint32_t gridDimZ_,
+ uint32_t blockDimX_,
+ uint32_t blockDimY_,
+ uint32_t blockDimZ_,
+ uint32_t sharedMemBytes_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , function( function_ )
+ , gridDimX( gridDimX_ )
+ , gridDimY( gridDimY_ )
+ , gridDimZ( gridDimZ_ )
+ , blockDimX( blockDimX_ )
+ , blockDimY( blockDimY_ )
+ , blockDimZ( blockDimZ_ )
+ , sharedMemBytes( sharedMemBytes_ )
+ , paramCount( params_.size() )
+ , pParams( params_.data() )
+ , extraCount( extras_.size() )
+ , pExtras( extras_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT
+ {
+ function = function_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
+ {
+ gridDimX = gridDimX_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
+ {
+ gridDimY = gridDimY_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
+ {
+ gridDimZ = gridDimZ_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blockDimX = blockDimX_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blockDimY = blockDimY_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blockDimZ = blockDimZ_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sharedMemBytes = sharedMemBytes_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ paramCount = paramCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pParams = pParams_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CudaLaunchInfoNV & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
+ {
+ paramCount = params_.size();
+ pParams = params_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extraCount = extraCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pExtras = pExtras_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ CudaLaunchInfoNV & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extraCount = extras_.size();
+ pExtras = extras_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCudaLaunchInfoNV *>( this );
+ }
+
+ operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCudaLaunchInfoNV *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ size_t const &,
+ const void * const * const &,
+ size_t const &,
+ const void * const * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CudaLaunchInfoNV const & ) const = default;
+# else
+ bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) &&
+ ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
+ ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) &&
+ ( pExtras == rhs.pExtras );
+# endif
+ }
+
+ bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaLaunchInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::CudaFunctionNV function = {};
+ uint32_t gridDimX = {};
+ uint32_t gridDimY = {};
+ uint32_t gridDimZ = {};
+ uint32_t blockDimX = {};
+ uint32_t blockDimY = {};
+ uint32_t blockDimZ = {};
+ uint32_t sharedMemBytes = {};
+ size_t paramCount = {};
+ const void * const * pParams = {};
+ size_t extraCount = {};
+ const void * const * pExtras = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCudaLaunchInfoNV>
+ {
+ using Type = CudaLaunchInfoNV;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
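
The enhanced-mode constructor and setParams fill paramCount/pParams from an ArrayProxy, mirroring CUDA's convention of passing one pointer per kernel argument. A sketch with illustrative grid/block dimensions, assuming a vk::CudaFunctionNV cudaFunction:

    uint64_t     dataAddress = 0;  // placeholder for a real device address
    float        scale       = 2.0f;
    const void * params[]    = { &dataAddress, &scale };  // one entry per kernel parameter
    vk::CudaLaunchInfoNV launchInfo( cudaFunction, 64, 1, 1, 256, 1, 1, 0, params );
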
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct CudaModuleCreateInfoNV
+ {
+ using NativeType = VkCudaModuleCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dataSize( dataSize_ )
+ , pData( pData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CudaModuleCreateInfoNV( *reinterpret_cast<CudaModuleCreateInfoNV const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ template <typename T>
+ CudaModuleCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataSize = dataSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pData = pData_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ template <typename T>
+ CudaModuleCreateInfoNV & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataSize = data_.size() * sizeof( T );
+ pData = data_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCudaModuleCreateInfoNV *>( this );
+ }
+
+ operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCudaModuleCreateInfoNV *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, dataSize, pData );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CudaModuleCreateInfoNV const & ) const = default;
+# else
+ bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+# endif
+ }
+
+ bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaModuleCreateInfoNV;
+ const void * pNext = {};
+ size_t dataSize = {};
+ const void * pData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCudaModuleCreateInfoNV>
+ {
+ using Type = CudaModuleCreateInfoNV;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
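A module-creation sketch using the ArrayProxyNoTemporaries convenience constructor defined above; loadCubin is a hypothetical loader returning a compiled CUDA binary, and the commented create call is the assumed beta entry point:

    std::vector<uint8_t> cubin = loadCubin( "kernels.cubin" );   // hypothetical helper

    // The enhanced-mode constructor fills dataSize and pData from the container.
    vk::CudaModuleCreateInfoNV moduleInfo( vk::ArrayProxyNoTemporaries<const uint8_t>( cubin ) );

    // Module creation itself would use the beta entry point, e.g.
    // vk::CudaModuleNV cudaModule = device.createCudaModuleNV( moduleInfo );
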
#if defined( VK_USE_PLATFORM_WIN32_KHR )
struct D3D12FenceSubmitInfoKHR
{
@@ -29582,6 +30081,102 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DeviceQueueInfo2;
};
+ struct DeviceQueueShaderCoreControlCreateInfoARM
+ {
+ using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderCoreCount( shaderCoreCount_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast<DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs ) )
+ {
+ }
+
+ DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderCoreCount = shaderCoreCount_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
+ }
+
+ operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderCoreCount );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default;
+#else
+ bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount );
+# endif
+ }
+
+ bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;
+ void * pNext = {};
+ uint32_t shaderCoreCount = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceQueueShaderCoreControlCreateInfoARM>
+ {
+ using Type = DeviceQueueShaderCoreControlCreateInfoARM;
+ };
+
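A sketch of this struct's intended use, chaining it into a queue create-info so the queue is scheduled onto a reduced number of shader cores; the queue family index, queue count, and priority are placeholders:

    float priority = 1.0f;

    // Request that this queue run on at most four shader cores.
    vk::DeviceQueueShaderCoreControlCreateInfoARM coreControl( /*shaderCoreCount_=*/ 4 );

    vk::DeviceQueueCreateInfo queueInfo( /*flags_=*/ {}, /*queueFamilyIndex_=*/ 0, /*queueCount_=*/ 1, &priority );
    queueInfo.pNext = &coreControl;   // extends VkDeviceQueueCreateInfo per VK_ARM_scheduling_controls
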
struct DirectDriverLoadingInfoLUNARG
{
using NativeType = VkDirectDriverLoadingInfoLUNARG;
@@ -58128,6 +58723,195 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceCubicWeightsFeaturesQCOM;
};
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct PhysicalDeviceCudaKernelLaunchFeaturesNV
+ {
+ using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , cudaKernelLaunchFeatures( cudaKernelLaunchFeatures_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV &
+ setCudaKernelLaunchFeatures( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT
+ {
+ cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, cudaKernelLaunchFeatures );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default;
+# else
+ bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV>
+ {
+ using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
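A feature-query sketch, assuming a valid vk::PhysicalDevice named physicalDevice; the templated getFeatures2 overload links the structure chain automatically:

    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceCudaKernelLaunchFeaturesNV>();
    // Non-zero iff the implementation supports CUDA kernel launches.
    bool cudaLaunchSupported =
      !!chain.get<vk::PhysicalDeviceCudaKernelLaunchFeaturesNV>().cudaKernelLaunchFeatures;
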
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct PhysicalDeviceCudaKernelLaunchPropertiesNV
+ {
+ using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {},
+ uint32_t computeCapabilityMajor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , computeCapabilityMinor( computeCapabilityMinor_ )
+ , computeCapabilityMajor( computeCapabilityMajor_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default;
+# else
+ bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) &&
+ ( computeCapabilityMajor == rhs.computeCapabilityMajor );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;
+ void * pNext = {};
+ uint32_t computeCapabilityMinor = {};
+ uint32_t computeCapabilityMajor = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV>
+ {
+ using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
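A companion properties query, under the same physicalDevice assumption as the feature sketch above; major and minor combine the way CUDA reports compute capability (e.g. 8 and 6 for SM 8.6):

    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDeviceCudaKernelLaunchPropertiesNV>();
    auto const & cuda = chain.get<vk::PhysicalDeviceCudaKernelLaunchPropertiesNV>();
    uint32_t smVersion = cuda.computeCapabilityMajor * 10 + cuda.computeCapabilityMinor;   // e.g. 86
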
struct PhysicalDeviceCustomBorderColorFeaturesEXT
{
using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;
@@ -77723,6 +78507,204 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+ struct PhysicalDeviceSchedulingControlsFeaturesARM
+ {
+ using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , schedulingControls( schedulingControls_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM &
+ setSchedulingControls( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT
+ {
+ schedulingControls = schedulingControls_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
+ }
+
+ operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, schedulingControls );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControls == rhs.schedulingControls );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 schedulingControls = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM>
+ {
+ using Type = PhysicalDeviceSchedulingControlsFeaturesARM;
+ };
+
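An enable-at-device-creation sketch, assuming VK_ARM_scheduling_controls was added to ppEnabledExtensionNames and the queue create-infos are filled in elsewhere:

    vk::PhysicalDeviceSchedulingControlsFeaturesARM schedulingFeatures( /*schedulingControls_=*/ VK_TRUE );

    vk::DeviceCreateInfo deviceInfo;          // queue create-infos etc. omitted for brevity
    deviceInfo.pNext = &schedulingFeatures;   // opts the device in to scheduling controls
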
+ struct PhysicalDeviceSchedulingControlsPropertiesARM
+ {
+ using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceSchedulingControlsPropertiesARM( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , schedulingControlsFlags( schedulingControlsFlags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsPropertiesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsPropertiesARM &
+ setSchedulingControlsFlags( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ schedulingControlsFlags = schedulingControlsFlags_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
+ }
+
+ operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, schedulingControlsFlags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControlsFlags == rhs.schedulingControlsFlags );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM>
+ {
+ using Type = PhysicalDeviceSchedulingControlsPropertiesARM;
+ };
+
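A properties sketch showing how an application would discover which controls are available before chaining DeviceQueueShaderCoreControlCreateInfoARM, again assuming a valid physicalDevice:

    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDeviceSchedulingControlsPropertiesARM>();
    auto flags = chain.get<vk::PhysicalDeviceSchedulingControlsPropertiesARM>().schedulingControlsFlags;

    if ( flags & vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount )
    {
      // per-queue shader core count control is supported
    }
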
struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
{
using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp
index 3a34b36..2622002 100644
--- a/include/vulkan/vulkan_to_string.hpp
+++ b/include/vulkan/vulkan_to_string.hpp
@@ -3248,6 +3248,20 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
+ //=== VK_ARM_scheduling_controls ===
+
+ VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagsARM value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount )
+ result += "ShaderCoreCount | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
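For reference, the formatter above renders a set flag with braces:

    std::string s = vk::to_string( vk::PhysicalDeviceSchedulingControlsFlagsARM(
      vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount ) );   // "{ ShaderCoreCount }"
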
//=== VK_NV_memory_decompression ===
VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value )
@@ -4242,6 +4256,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eCudaModuleCreateInfoNV: return "CudaModuleCreateInfoNV";
+ case StructureType::eCudaFunctionCreateInfoNV: return "CudaFunctionCreateInfoNV";
+ case StructureType::eCudaLaunchInfoNV: return "CudaLaunchInfoNV";
+ case StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV: return "PhysicalDeviceCudaKernelLaunchFeaturesNV";
+ case StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV: return "PhysicalDeviceCudaKernelLaunchPropertiesNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case StructureType::eQueryLowLatencySupportNV: return "QueryLowLatencySupportNV";
#if defined( VK_USE_PLATFORM_METAL_EXT )
case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT";
@@ -4379,6 +4400,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT";
case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
case StructureType::ePhysicalDeviceShaderCorePropertiesARM: return "PhysicalDeviceShaderCorePropertiesARM";
+ case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return "DeviceQueueShaderCoreControlCreateInfoARM";
+ case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM";
+ case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM";
case StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT: return "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT";
case StructureType::eImageViewSlicedCreateInfoEXT: return "ImageViewSlicedCreateInfoEXT";
case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE";
@@ -4553,6 +4577,10 @@ namespace VULKAN_HPP_NAMESPACE
case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL";
case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR";
case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ObjectType::eCudaModuleNV: return "CudaModuleNV";
+ case ObjectType::eCudaFunctionNV: return "CudaFunctionNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -6947,6 +6975,10 @@ namespace VULKAN_HPP_NAMESPACE
case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case DebugReportObjectTypeEXT::eCudaModuleNV: return "CudaModuleNV";
+ case DebugReportObjectTypeEXT::eCudaFunctionNV: return "CudaFunctionNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -8638,6 +8670,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ //=== VK_ARM_scheduling_controls ===
+
+ VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagBitsARM value )
+ {
+ switch ( value )
+ {
+ case PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount: return "ShaderCoreCount";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
//=== VK_NV_memory_decompression ===
VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value )