author    Jon Leech <[email protected]>  2021-06-06 23:10:19 -0700
committer Jon Leech <[email protected]>  2021-06-06 23:10:45 -0700
commit    07c4a37bcf41ea50aef6e98236abdfe8089fb4c6 (patch)
tree      313a5be92b1fe1778de102812588310dadf6ff09
parent    7fe877c90abf00bc71b3c68f49db4c9bb1010411 (diff)
download  Vulkan-Headers-07c4a37bcf41ea50aef6e98236abdfe8089fb4c6.tar.gz
          Vulkan-Headers-07c4a37bcf41ea50aef6e98236abdfe8089fb4c6.zip
Update for Vulkan-Docs 1.2.180 (tag: v1.2.180)
-rw-r--r--  include/vulkan/vulkan.hpp     | 36161
-rw-r--r--  include/vulkan/vulkan_core.h  |    38
-rw-r--r--  registry/cgenerator.py        |    41
-rw-r--r--  registry/generator.py         |    41
-rw-r--r--  registry/validusage.json      |   422
-rw-r--r--  registry/vk.xml               |   190
6 files changed, 18995 insertions(+), 17898 deletions(-)
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 7772cb9..54fae4f 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -67,7 +67,7 @@
#endif
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
-# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
# include <dlfcn.h>
# elif defined( _WIN32 )
typedef struct HINSTANCE__ * HINSTANCE;
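// Illustrative sketch, not part of this commit: the hunk above only widens the
// platform test from __linux__ to __unix__, so any Unix-like platform (e.g. the
// BSDs) now takes the dlfcn.h branch used by the dynamic loader tool. A minimal
// equivalent of that branch, with the library name as an assumption for
// illustration:
#include <dlfcn.h>
void * loadVulkanLoaderLibrary()
{
  void * lib = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
  // a real loader then resolves vkGetInstanceProcAddr via dlsym( lib, ... )
  // and bootstraps the remaining entry points from it
  return lib;
}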
@@ -93,7 +93,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <compare>
#endif
-static_assert( VK_HEADER_VERSION == 179, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 180, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
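// Illustrative sketch, not part of this commit: application code can pin a
// minimum header version the same way, e.g.
static_assert( VK_HEADER_VERSION >= 180, "Vulkan headers older than 1.2.180" );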
@@ -2893,7 +2893,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_XLIB_KHR )
-
//=== VK_KHR_xlib_surface ===
VkResult vkCreateXlibSurfaceKHR( VkInstance instance,
@@ -2914,7 +2913,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
-
//=== VK_KHR_xcb_surface ===
VkResult vkCreateXcbSurfaceKHR( VkInstance instance,
@@ -2935,7 +2933,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
-
//=== VK_KHR_wayland_surface ===
VkResult vkCreateWaylandSurfaceKHR( VkInstance instance,
@@ -2955,7 +2952,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
-
//=== VK_KHR_android_surface ===
VkResult vkCreateAndroidSurfaceKHR( VkInstance instance,
@@ -2968,7 +2964,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_KHR_win32_surface ===
VkResult vkCreateWin32SurfaceKHR( VkInstance instance,
@@ -3048,7 +3043,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_queue ===
VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice,
@@ -3147,7 +3141,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_decode_queue ===
void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer,
@@ -3311,7 +3304,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_GGP )
-
//=== VK_GGP_stream_descriptor_surface ===
VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance,
@@ -3340,7 +3332,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_NV_external_memory_win32 ===
VkResult vkGetMemoryWin32HandleNV( VkDevice device,
@@ -3437,7 +3428,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_VI_NN )
-
//=== VK_NN_vi_surface ===
VkResult vkCreateViSurfaceNN( VkInstance instance,
@@ -3481,7 +3471,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_KHR_external_memory_win32 ===
VkResult vkGetMemoryWin32HandleKHR( VkDevice device,
@@ -3529,7 +3518,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_KHR_external_semaphore_win32 ===
VkResult vkImportSemaphoreWin32HandleKHR(
@@ -3643,7 +3631,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
-
//=== VK_EXT_acquire_xlib_display ===
VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice,
@@ -3795,7 +3782,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_KHR_external_fence_win32 ===
VkResult vkImportFenceWin32HandleKHR(
@@ -3912,7 +3898,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_IOS_MVK )
-
//=== VK_MVK_ios_surface ===
VkResult vkCreateIOSSurfaceMVK( VkInstance instance,
@@ -3925,7 +3910,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
-
//=== VK_MVK_macos_surface ===
VkResult vkCreateMacOSSurfaceMVK( VkInstance instance,
@@ -4010,7 +3994,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
-
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device,
@@ -4672,7 +4655,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
-
//=== VK_FUCHSIA_imagepipe_surface ===
VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance,
@@ -4685,7 +4667,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
-
//=== VK_EXT_metal_surface ===
VkResult vkCreateMetalSurfaceEXT( VkInstance instance,
@@ -4756,7 +4737,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_EXT_full_screen_exclusive ===
VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice,
@@ -5067,7 +5047,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_encode_queue ===
void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer,
@@ -5190,7 +5169,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_NV_acquire_winrt_display ===
VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
@@ -5207,7 +5185,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
-
//=== VK_EXT_directfb_surface ===
VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance,
@@ -5326,7 +5303,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
-
//=== VK_FUCHSIA_external_memory ===
VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device,
@@ -5348,7 +5324,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
-
//=== VK_FUCHSIA_external_semaphore ===
VkResult vkImportSemaphoreZirconHandleFUCHSIA(
@@ -5397,7 +5372,6 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
-
//=== VK_QNX_screen_surface ===
VkResult vkCreateScreenSurfaceQNX( VkInstance instance,
@@ -6420,8 +6394,10 @@ namespace VULKAN_HPP_NAMESPACE
eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
- eQueueFamilyCheckpointProperties2Nv = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
- eCheckpointData2Nv = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
+ eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+ eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
+ ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR =
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV =
@@ -6476,20 +6452,23 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
- eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
- eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
- eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
- eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
- eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
- eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
- eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
- eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
- eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
- eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
- eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
- eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
- eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
- eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+ ePhysicalDeviceGlobalPriorityQueryFeaturesEXT =
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
+ eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
+ eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+ eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+ eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+ eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+ eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+ eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+ eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+ eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+ eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+ eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+ eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+ eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+ eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
eDescriptorSetLayoutBindingFlagsCreateInfoEXT =
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
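// Illustrative sketch, not part of this commit: the new
// VK_EXT_global_priority_query StructureType values above are consumed by
// chaining the matching struct into vkGetPhysicalDeviceQueueFamilyProperties2
// (physicalDevice and the single-element query are assumptions for brevity).
VkQueueFamilyGlobalPriorityPropertiesEXT priorityProps{};
priorityProps.sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT;
VkQueueFamilyProperties2 familyProps{};
familyProps.sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
familyProps.pNext = &priorityProps;
uint32_t familyCount = 1;
vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, &familyCount, &familyProps );
// priorityProps.priorityCount / priorityProps.priorities now describe queue family 0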
@@ -7295,8 +7274,10 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eCommandBufferSubmitInfoKHR: return "CommandBufferSubmitInfoKHR";
case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR:
return "PhysicalDeviceSynchronization2FeaturesKHR";
- case StructureType::eQueueFamilyCheckpointProperties2Nv: return "QueueFamilyCheckpointProperties2Nv";
- case StructureType::eCheckpointData2Nv: return "CheckpointData2Nv";
+ case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV";
+ case StructureType::eCheckpointData2NV: return "CheckpointData2NV";
+ case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR:
+ return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR:
return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR";
case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV:
@@ -7352,6 +7333,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT:
return "PhysicalDeviceColorWriteEnableFeaturesEXT";
case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
+ case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT:
+ return "PhysicalDeviceGlobalPriorityQueryFeaturesEXT";
+ case StructureType::eQueueFamilyGlobalPriorityPropertiesEXT: return "QueueFamilyGlobalPriorityPropertiesEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
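// Illustrative sketch, not part of this commit: the new
// PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR structure type is
// queried through the vkGetPhysicalDeviceFeatures2 pNext chain
// (physicalDevice is an assumption for brevity).
VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR subgroupUcf{};
subgroupUcf.sType =
  VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR;
VkPhysicalDeviceFeatures2 features2{};
features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
features2.pNext = &subgroupUcf;
vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );
// subgroupUcf.shaderSubgroupUniformControlFlow then reports feature support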
@@ -10547,8 +10531,7 @@ namespace VULKAN_HPP_NAMESPACE
eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
eMoltenvk = VK_DRIVER_ID_MOLTENVK,
eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY,
- eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY,
- eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
+ eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY
};
using DriverIdKHR = DriverId;
@@ -11009,7 +10992,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_queue ===
enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
@@ -11166,7 +11148,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_decode_queue ===
enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
@@ -11187,7 +11168,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_EXT_video_encode_h264 ===
enum class VideoEncodeH264CapabilitiesFlagBitsEXT : VkVideoEncodeH264CapabilitiesFlagsEXT
@@ -11243,74 +11223,65 @@ namespace VULKAN_HPP_NAMESPACE
enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
{
- eVkVideoEncodeH264InputModeFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
- eVkVideoEncodeH264InputModeSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
- eVkVideoEncodeH264InputModeNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
+ eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
+ eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
+ eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
};
VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
{
switch ( value )
{
- case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame:
- return "VkVideoEncodeH264InputModeFrame";
- case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice:
- return "VkVideoEncodeH264InputModeSlice";
- case VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl:
- return "VkVideoEncodeH264InputModeNonVcl";
+ case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice";
+ case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
{
- eVkVideoEncodeH264OutputModeFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
- eVkVideoEncodeH264OutputModeSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
- eVkVideoEncodeH264OutputModeNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
+ eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
+ eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
+ eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
};
VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
{
switch ( value )
{
- case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame:
- return "VkVideoEncodeH264OutputModeFrame";
- case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice:
- return "VkVideoEncodeH264OutputModeSlice";
- case VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl:
- return "VkVideoEncodeH264OutputModeNonVcl";
+ case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice";
+ case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT
{
- eVkVideoEncodeH264CreateDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT,
- eVkVideoEncodeH264CreateReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT
+ eDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT,
+ eReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT
};
VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value )
{
switch ( value )
{
- case VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateDefault: return "VkVideoEncodeH264CreateDefault";
- case VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0:
- return "VkVideoEncodeH264CreateReserved0";
+ case VideoEncodeH264CreateFlagBitsEXT::eDefault: return "Default";
+ case VideoEncodeH264CreateFlagBitsEXT::eReserved0: return "Reserved0";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_EXT_video_decode_h264 ===
enum class VideoDecodeH264FieldLayoutFlagBitsEXT : VkVideoDecodeH264FieldLayoutFlagsEXT
{
- eVkVideoDecodeH264ProgressivePicturesOnly = VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT,
- eVkVideoDecodeH264FieldLayoutLineInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT,
- eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane =
- VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT
+ eVkVideoDecodeH264ProgressivePicturesOnly = VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT,
+ eLineInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT,
+ eSeparateInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT
};
VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagBitsEXT value )
@@ -11319,10 +11290,8 @@ namespace VULKAN_HPP_NAMESPACE
{
case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly:
return "VkVideoDecodeH264ProgressivePicturesOnly";
- case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane:
- return "VkVideoDecodeH264FieldLayoutLineInterlacedPlane";
- case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane:
- return "VkVideoDecodeH264FieldLayoutSeparateInterlacedPlane";
+ case VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane: return "LineInterlacedPlane";
+ case VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane: return "SeparateInterlacedPlane";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -12293,13 +12262,14 @@ namespace VULKAN_HPP_NAMESPACE
enum class ValidationFeatureDisableEXT
{
- eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
- eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
- eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
- eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
- eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
- eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
- eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT
+ eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+ eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+ eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+ eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+ eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+ eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+ eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
+ eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
};
VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
@@ -12313,6 +12283,7 @@ namespace VULKAN_HPP_NAMESPACE
case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes";
case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks";
case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles";
+ case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
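// Illustrative sketch, not part of this commit: the new eShaderValidationCache
// disable maps to VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT and
// is passed through VkValidationFeaturesEXT at instance creation time.
VkValidationFeatureDisableEXT disabledFeatures[] = {
  VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
};
VkValidationFeaturesEXT validationFeatures{};
validationFeatures.sType                          = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
validationFeatures.disabledValidationFeatureCount = 1;
validationFeatures.pDisabledValidationFeatures    = disabledFeatures;
// chain validationFeatures into VkInstanceCreateInfo::pNext before vkCreateInstance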
@@ -12410,7 +12381,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_EXT_full_screen_exclusive ===
enum class FullScreenExclusiveEXT
@@ -12576,7 +12546,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_encode_queue ===
enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
@@ -12683,18 +12652,18 @@ namespace VULKAN_HPP_NAMESPACE
eVideoDecode = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
eVideoEncode = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eTransformFeedbackExt = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
- eConditionalRenderingExt = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
- eCommandPreprocessNv = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+ eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+ eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+ eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
eFragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
eAccelerationStructureBuild = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
eRayTracingShader = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
- eFragmentDensityProcessExt = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
- eTaskShaderNv = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
- eMeshShaderNv = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
- eAccelerationStructureBuildNv = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
- eRayTracingShaderNv = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
- eShadingRateImageNv = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+ eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+ eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+ eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+ eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+ eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR
};
@@ -12731,15 +12700,15 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineStageFlagBits2KHR::eVideoDecode: return "VideoDecode";
case PipelineStageFlagBits2KHR::eVideoEncode: return "VideoEncode";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case PipelineStageFlagBits2KHR::eTransformFeedbackExt: return "TransformFeedbackExt";
- case PipelineStageFlagBits2KHR::eConditionalRenderingExt: return "ConditionalRenderingExt";
- case PipelineStageFlagBits2KHR::eCommandPreprocessNv: return "CommandPreprocessNv";
+ case PipelineStageFlagBits2KHR::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+ case PipelineStageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+ case PipelineStageFlagBits2KHR::eCommandPreprocessNV: return "CommandPreprocessNV";
case PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment";
case PipelineStageFlagBits2KHR::eAccelerationStructureBuild: return "AccelerationStructureBuild";
case PipelineStageFlagBits2KHR::eRayTracingShader: return "RayTracingShader";
- case PipelineStageFlagBits2KHR::eFragmentDensityProcessExt: return "FragmentDensityProcessExt";
- case PipelineStageFlagBits2KHR::eTaskShaderNv: return "TaskShaderNv";
- case PipelineStageFlagBits2KHR::eMeshShaderNv: return "MeshShaderNv";
+ case PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+ case PipelineStageFlagBits2KHR::eTaskShaderNV: return "TaskShaderNV";
+ case PipelineStageFlagBits2KHR::eMeshShaderNV: return "MeshShaderNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -12773,20 +12742,20 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeRead = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR,
eVideoEncodeWrite = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eTransformFeedbackWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
- eTransformFeedbackCounterReadExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
- eTransformFeedbackCounterWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
- eConditionalRenderingReadExt = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
- eCommandPreprocessReadNv = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
- eCommandPreprocessWriteNv = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+ eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+ eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+ eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+ eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+ eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+ eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
eFragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
eAccelerationStructureRead = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
eAccelerationStructureWrite = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
- eFragmentDensityMapReadExt = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
- eColorAttachmentReadNoncoherentExt = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
- eAccelerationStructureReadNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
- eAccelerationStructureWriteNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
- eShadingRateImageReadNv = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
+ eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+ eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+ eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+ eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+ eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
};
VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value )
@@ -12820,17 +12789,17 @@ namespace VULKAN_HPP_NAMESPACE
case AccessFlagBits2KHR::eVideoEncodeRead: return "VideoEncodeRead";
case AccessFlagBits2KHR::eVideoEncodeWrite: return "VideoEncodeWrite";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case AccessFlagBits2KHR::eTransformFeedbackWriteExt: return "TransformFeedbackWriteExt";
- case AccessFlagBits2KHR::eTransformFeedbackCounterReadExt: return "TransformFeedbackCounterReadExt";
- case AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt: return "TransformFeedbackCounterWriteExt";
- case AccessFlagBits2KHR::eConditionalRenderingReadExt: return "ConditionalRenderingReadExt";
- case AccessFlagBits2KHR::eCommandPreprocessReadNv: return "CommandPreprocessReadNv";
- case AccessFlagBits2KHR::eCommandPreprocessWriteNv: return "CommandPreprocessWriteNv";
+ case AccessFlagBits2KHR::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
+ case AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
+ case AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
+ case AccessFlagBits2KHR::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
+ case AccessFlagBits2KHR::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
+ case AccessFlagBits2KHR::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
case AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead: return "FragmentShadingRateAttachmentRead";
case AccessFlagBits2KHR::eAccelerationStructureRead: return "AccelerationStructureRead";
case AccessFlagBits2KHR::eAccelerationStructureWrite: return "AccelerationStructureWrite";
- case AccessFlagBits2KHR::eFragmentDensityMapReadExt: return "FragmentDensityMapReadExt";
- case AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt: return "ColorAttachmentReadNoncoherentExt";
+ case AccessFlagBits2KHR::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
+ case AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
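// Illustrative sketch, not part of this commit (assuming the default vk::
// alias for VULKAN_HPP_NAMESPACE): the renamed enumerators above now carry the
// full vendor suffix, so code written against the old mixed-case spellings
// (eTransformFeedbackWriteExt, eCommandPreprocessNv, ...) switches to e.g.
vk::MemoryBarrier2KHR barrier{};
barrier.srcStageMask  = vk::PipelineStageFlagBits2KHR::eTransformFeedbackEXT;
barrier.srcAccessMask = vk::AccessFlagBits2KHR::eTransformFeedbackWriteEXT;
barrier.dstStageMask  = vk::PipelineStageFlagBits2KHR::eCommandPreprocessNV;
barrier.dstAccessMask = vk::AccessFlagBits2KHR::eCommandPreprocessReadNV;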
@@ -13057,8 +13026,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & FormatFeatureFlagBits::eSampledImage )
result += "SampledImage | ";
if ( value & FormatFeatureFlagBits::eStorageImage )
@@ -13179,8 +13148,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ImageCreateFlagBits::eSparseBinding )
result += "SparseBinding | ";
if ( value & ImageCreateFlagBits::eSparseResidency )
@@ -13266,8 +13235,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ImageUsageFlagBits::eTransferSrc )
result += "TransferSrc | ";
if ( value & ImageUsageFlagBits::eTransferDst )
@@ -13360,8 +13329,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & MemoryHeapFlagBits::eDeviceLocal )
result += "DeviceLocal | ";
if ( value & MemoryHeapFlagBits::eMultiInstance )
@@ -13412,8 +13381,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & MemoryPropertyFlagBits::eDeviceLocal )
result += "DeviceLocal | ";
if ( value & MemoryPropertyFlagBits::eHostVisible )
@@ -13475,8 +13444,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & QueueFlagBits::eGraphics )
result += "Graphics | ";
if ( value & QueueFlagBits::eCompute )
@@ -13539,8 +13508,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SampleCountFlagBits::e1 )
result += "1 | ";
if ( value & SampleCountFlagBits::e2 )
@@ -13604,8 +13573,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DeviceQueueCreateFlagBits::eProtected )
result += "Protected | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -13665,8 +13634,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineStageFlagBits::eTopOfPipe )
result += "TopOfPipe | ";
if ( value & PipelineStageFlagBits::eDrawIndirect )
@@ -13781,8 +13750,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ImageAspectFlagBits::eColor )
result += "Color | ";
if ( value & ImageAspectFlagBits::eDepth )
@@ -13849,8 +13818,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SparseImageFormatFlagBits::eSingleMiptail )
result += "SingleMiptail | ";
if ( value & SparseImageFormatFlagBits::eAlignedMipSize )
@@ -13899,8 +13868,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SparseMemoryBindFlagBits::eMetadata )
result += "Metadata | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -13944,8 +13913,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & FenceCreateFlagBits::eSignaled )
result += "Signaled | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -14005,8 +13974,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & EventCreateFlagBits::eDeviceOnlyKHR )
result += "DeviceOnlyKHR | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -14061,8 +14030,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices )
result += "InputAssemblyVertices | ";
if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives )
@@ -14137,8 +14106,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & QueryResultFlagBits::e64 )
result += "64 | ";
if ( value & QueryResultFlagBits::eWait )
@@ -14194,8 +14163,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & BufferCreateFlagBits::eSparseBinding )
result += "SparseBinding | ";
if ( value & BufferCreateFlagBits::eSparseResidency )
@@ -14264,8 +14233,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & BufferUsageFlagBits::eTransferSrc )
result += "TransferSrc | ";
if ( value & BufferUsageFlagBits::eTransferDst )
@@ -14373,8 +14342,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT )
result += "FragmentDensityMapDynamicEXT | ";
if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
@@ -14428,8 +14397,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT )
result += "ExternallySynchronizedEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -14475,8 +14444,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ColorComponentFlagBits::eR )
result += "R | ";
if ( value & ColorComponentFlagBits::eG )
@@ -14527,8 +14496,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CullModeFlagBits::eFront )
result += "Front | ";
if ( value & CullModeFlagBits::eBack )
@@ -14606,8 +14575,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineCreateFlagBits::eDisableOptimization )
result += "DisableOptimization | ";
if ( value & PipelineCreateFlagBits::eAllowDerivatives )
@@ -14785,8 +14754,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT )
result += "AllowVaryingSubgroupSizeEXT | ";
if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT )
@@ -14887,8 +14856,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ShaderStageFlagBits::eVertex )
result += "Vertex | ";
if ( value & ShaderStageFlagBits::eTessellationControl )
@@ -14959,8 +14928,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SamplerCreateFlagBits::eSubsampledEXT )
result += "SubsampledEXT | ";
if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
@@ -15009,8 +14978,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
result += "FreeDescriptorSet | ";
if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind )
@@ -15077,8 +15046,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool )
result += "UpdateAfterBindPool | ";
if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR )
@@ -15145,8 +15114,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & AccessFlagBits::eIndirectCommandRead )
result += "IndirectCommandRead | ";
if ( value & AccessFlagBits::eIndexRead )
@@ -15245,8 +15214,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & AttachmentDescriptionFlagBits::eMayAlias )
result += "MayAlias | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15291,8 +15260,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DependencyFlagBits::eByRegion )
result += "ByRegion | ";
if ( value & DependencyFlagBits::eDeviceGroup )
@@ -15341,8 +15310,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & FramebufferCreateFlagBits::eImageless )
result += "Imageless | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15387,8 +15356,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & RenderPassCreateFlagBits::eTransformQCOM )
result += "TransformQCOM | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15436,8 +15405,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX )
result += "PerViewAttributesNVX | ";
if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX )
@@ -15490,8 +15459,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CommandPoolCreateFlagBits::eTransient )
result += "Transient | ";
if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer )
@@ -15540,8 +15509,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CommandPoolResetFlagBits::eReleaseResources )
result += "ReleaseResources | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15586,8 +15555,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CommandBufferResetFlagBits::eReleaseResources )
result += "ReleaseResources | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15634,8 +15603,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit )
result += "OneTimeSubmit | ";
if ( value & CommandBufferUsageFlagBits::eRenderPassContinue )
@@ -15683,8 +15652,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & QueryControlFlagBits::ePrecise )
result += "Precise | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -15729,8 +15698,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & StencilFaceFlagBits::eFront )
result += "Front | ";
if ( value & StencilFaceFlagBits::eBack )
@@ -15783,8 +15752,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SubgroupFeatureFlagBits::eBasic )
result += "Basic | ";
if ( value & SubgroupFeatureFlagBits::eVote )
@@ -15848,8 +15817,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PeerMemoryFeatureFlagBits::eCopySrc )
result += "CopySrc | ";
if ( value & PeerMemoryFeatureFlagBits::eCopyDst )
@@ -15903,8 +15872,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & MemoryAllocateFlagBits::eDeviceMask )
result += "DeviceMask | ";
if ( value & MemoryAllocateFlagBits::eDeviceAddress )
@@ -16006,8 +15975,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
result += "OpaqueFd | ";
if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 )
@@ -16082,8 +16051,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly )
result += "DedicatedOnly | ";
if ( value & ExternalMemoryFeatureFlagBits::eExportable )
@@ -16137,8 +16106,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd )
result += "OpaqueFd | ";
if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 )
@@ -16192,8 +16161,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalFenceFeatureFlagBits::eExportable )
result += "Exportable | ";
if ( value & ExternalFenceFeatureFlagBits::eImportable )
@@ -16241,8 +16210,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & FenceImportFlagBits::eTemporary )
result += "Temporary | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -16289,8 +16258,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SemaphoreImportFlagBits::eTemporary )
result += "Temporary | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -16344,8 +16313,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
result += "OpaqueFd | ";
if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 )
@@ -16405,8 +16374,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalSemaphoreFeatureFlagBits::eExportable )
result += "Exportable | ";
if ( value & ExternalSemaphoreFeatureFlagBits::eImportable )
@@ -16460,8 +16429,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DescriptorBindingFlagBits::eUpdateAfterBind )
result += "UpdateAfterBind | ";
if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending )
@@ -16515,8 +16484,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ResolveModeFlagBits::eSampleZero )
result += "SampleZero | ";
if ( value & ResolveModeFlagBits::eAverage )
@@ -16568,8 +16537,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SemaphoreWaitFlagBits::eAny )
result += "Any | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -16617,8 +16586,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & CompositeAlphaFlagBitsKHR::eOpaque )
result += "Opaque | ";
if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied )
@@ -16673,8 +16642,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions )
result += "SplitInstanceBindRegions | ";
if ( value & SwapchainCreateFlagBitsKHR::eProtected )
@@ -16726,8 +16695,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal )
result += "Local | ";
if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote )
@@ -16798,8 +16767,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque )
result += "Opaque | ";
if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal )
@@ -16873,8 +16842,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SurfaceTransformFlagBitsKHR::eIdentity )
result += "Identity | ";
if ( value & SurfaceTransformFlagBitsKHR::eRotate90 )
@@ -16897,7 +16866,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_USE_PLATFORM_XLIB_KHR )
-
//=== VK_KHR_xlib_surface ===
enum class XlibSurfaceCreateFlagBitsKHR : VkFlags
@@ -16918,7 +16886,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
-
//=== VK_KHR_xcb_surface ===
enum class XcbSurfaceCreateFlagBitsKHR : VkFlags
@@ -16939,7 +16906,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
-
//=== VK_KHR_wayland_surface ===
enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags
@@ -16960,7 +16926,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
-
//=== VK_KHR_android_surface ===
enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
@@ -16981,7 +16946,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
-
//=== VK_KHR_win32_surface ===
enum class Win32SurfaceCreateFlagBitsKHR : VkFlags
@@ -17044,8 +17008,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DebugReportFlagBitsEXT::eInformation )
result += "Information | ";
if ( value & DebugReportFlagBitsEXT::eWarning )
@@ -17060,7 +17024,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_queue ===
using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
@@ -17107,8 +17070,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT )
result += "EncodeH264EXT | ";
@@ -17166,8 +17129,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome )
result += "Monochrome | ";
if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 )
@@ -17220,8 +17183,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoComponentBitDepthFlagBitsKHR::e8 )
result += "8 | ";
if ( value & VideoComponentBitDepthFlagBitsKHR::e10 )
@@ -17271,8 +17234,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoCapabilitiesFlagBitsKHR::eProtectedContent )
result += "ProtectedContent | ";
if ( value & VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages )
@@ -17320,8 +17283,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent )
result += "ProtectedContent | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -17398,8 +17361,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoCodingControlFlagBitsKHR::eReset )
result += "Reset | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -17447,8 +17410,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal )
result += "Normal | ";
if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower )
@@ -17460,7 +17423,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_decode_queue ===
using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
@@ -17502,8 +17464,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoDecodeFlagBitsKHR::eReserved0 )
result += "Reserved0 | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -17529,7 +17491,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_EXT_video_encode_h264 ===
using VideoEncodeH264CapabilitiesFlagsEXT = Flags<VideoEncodeH264CapabilitiesFlagBitsEXT>;
@@ -17582,8 +17543,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac )
result += "VkVideoEncodeH264CapabilityCabac | ";
if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc )
@@ -17616,9 +17577,9 @@ namespace VULKAN_HPP_NAMESPACE
{
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame ) |
- VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice ) |
- VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl )
+ allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) |
+ VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) |
+ VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
};
};
@@ -17650,14 +17611,14 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeFrame )
- result += "VkVideoEncodeH264InputModeFrame | ";
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeSlice )
- result += "VkVideoEncodeH264InputModeSlice | ";
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eVkVideoEncodeH264InputModeNonVcl )
- result += "VkVideoEncodeH264InputModeNonVcl | ";
+ std::string result;
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice )
+ result += "Slice | ";
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -17668,9 +17629,9 @@ namespace VULKAN_HPP_NAMESPACE
{
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame ) |
- VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice ) |
- VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl )
+ allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) |
+ VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) |
+ VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
};
};
@@ -17702,14 +17663,14 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeFrame )
- result += "VkVideoEncodeH264OutputModeFrame | ";
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeSlice )
- result += "VkVideoEncodeH264OutputModeSlice | ";
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eVkVideoEncodeH264OutputModeNonVcl )
- result += "VkVideoEncodeH264OutputModeNonVcl | ";
+ std::string result;
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice )
+ result += "Slice | ";
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -17720,8 +17681,8 @@ namespace VULKAN_HPP_NAMESPACE
{
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateDefault ) |
- VkFlags( VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0 )
+ allFlags =
+ VkFlags( VideoEncodeH264CreateFlagBitsEXT::eDefault ) | VkFlags( VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
};
};
@@ -17753,16 +17714,15 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
- if ( value & VideoEncodeH264CreateFlagBitsEXT::eVkVideoEncodeH264CreateReserved0 )
- result += "VkVideoEncodeH264CreateReserved0 | ";
+ std::string result;
+ if ( value & VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
+ result += "Reserved0 | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_EXT_video_decode_h264 ===
using VideoDecodeH264FieldLayoutFlagsEXT = Flags<VideoDecodeH264FieldLayoutFlagBitsEXT>;
@@ -17773,8 +17733,8 @@ namespace VULKAN_HPP_NAMESPACE
enum : VkFlags
{
allFlags = VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly ) |
- VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane ) |
- VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane )
+ VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane ) |
+ VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane )
};
};
@@ -17806,12 +17766,12 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
- if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutLineInterlacedPlane )
- result += "VkVideoDecodeH264FieldLayoutLineInterlacedPlane | ";
- if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264FieldLayoutSeparateInterlacedPlane )
- result += "VkVideoDecodeH264FieldLayoutSeparateInterlacedPlane | ";
+ std::string result;
+ if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane )
+ result += "LineInterlacedPlane | ";
+ if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane )
+ result += "SeparateInterlacedPlane | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -17833,7 +17793,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_GGP )
-
//=== VK_GGP_stream_descriptor_surface ===
enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags
@@ -17897,8 +17856,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 )
result += "OpaqueWin32 | ";
if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt )
@@ -17951,8 +17910,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly )
result += "DedicatedOnly | ";
if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable )
@@ -17963,7 +17922,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_USE_PLATFORM_VI_NN )
-
//=== VK_NN_vi_surface ===
enum class ViSurfaceCreateFlagBitsNN : VkFlags
@@ -18024,8 +17982,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ConditionalRenderingFlagBitsEXT::eInverted )
result += "Inverted | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -18072,8 +18030,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SurfaceCounterFlagBitsEXT::eVblank )
result += "Vblank | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -18194,8 +18152,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting )
result += "PerformanceImpacting | ";
if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
@@ -18211,7 +18169,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_USE_PLATFORM_IOS_MVK )
-
//=== VK_MVK_ios_surface ===
enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
@@ -18232,7 +18189,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
-
//=== VK_MVK_macos_surface ===
enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags
@@ -18296,8 +18252,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose )
result += "Verbose | ";
if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo )
@@ -18350,8 +18306,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral )
result += "General | ";
if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation )
@@ -18453,8 +18409,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & GeometryFlagBitsKHR::eOpaque )
result += "Opaque | ";
if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
@@ -18506,8 +18462,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
result += "TriangleFacingCullDisable | ";
if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise )
@@ -18564,8 +18520,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate )
result += "AllowUpdate | ";
if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction )
@@ -18618,8 +18574,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay )
result += "DeviceAddressCaptureReplay | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -18671,7 +18627,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_EXT_video_decode_h265 ===
enum class VideoDecodeH265CreateFlagBitsEXT : VkFlags
@@ -18734,8 +18689,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid )
result += "Valid | ";
if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit )
@@ -18746,7 +18701,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_USE_PLATFORM_FUCHSIA )
-
//=== VK_FUCHSIA_imagepipe_surface ===
enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
@@ -18767,7 +18721,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
-
//=== VK_EXT_metal_surface ===
enum class MetalSurfaceCreateFlagBitsEXT : VkFlags
@@ -18840,8 +18793,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & ToolPurposeFlagBitsEXT::eValidation )
result += "Validation | ";
if ( value & ToolPurposeFlagBitsEXT::eProfiling )
@@ -18936,8 +18889,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & IndirectStateFlagBitsNV::eFlagFrontface )
result += "FlagFrontface | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -18984,8 +18937,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess )
result += "ExplicitPreprocess | ";
if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences )
@@ -19023,7 +18976,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#if defined( VK_ENABLE_BETA_EXTENSIONS )
-
//=== VK_KHR_video_encode_queue ===
using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;
@@ -19065,8 +19017,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoEncodeFlagBitsKHR::eReserved0 )
result += "Reserved0 | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -19112,8 +19064,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & VideoEncodeRateControlFlagBitsKHR::eReset )
result += "Reset | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
@@ -19160,8 +19112,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -19209,8 +19161,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo )
result += "EnableShaderDebugInfo | ";
if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking )
@@ -19251,14 +19203,14 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags64( PipelineStageFlagBits2KHR::eVideoDecode ) | VkFlags64( PipelineStageFlagBits2KHR::eVideoEncode )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( PipelineStageFlagBits2KHR::eTransformFeedbackExt ) |
- VkFlags64( PipelineStageFlagBits2KHR::eConditionalRenderingExt ) |
- VkFlags64( PipelineStageFlagBits2KHR::eCommandPreprocessNv ) |
+ | VkFlags64( PipelineStageFlagBits2KHR::eTransformFeedbackEXT ) |
+ VkFlags64( PipelineStageFlagBits2KHR::eConditionalRenderingEXT ) |
+ VkFlags64( PipelineStageFlagBits2KHR::eCommandPreprocessNV ) |
VkFlags64( PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) |
VkFlags64( PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) |
VkFlags64( PipelineStageFlagBits2KHR::eRayTracingShader ) |
- VkFlags64( PipelineStageFlagBits2KHR::eFragmentDensityProcessExt ) |
- VkFlags64( PipelineStageFlagBits2KHR::eTaskShaderNv ) | VkFlags64( PipelineStageFlagBits2KHR::eMeshShaderNv )
+ VkFlags64( PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT ) |
+ VkFlags64( PipelineStageFlagBits2KHR::eTaskShaderNV ) | VkFlags64( PipelineStageFlagBits2KHR::eMeshShaderNV )
};
};
@@ -19290,8 +19242,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & PipelineStageFlagBits2KHR::eTopOfPipe )
result += "TopOfPipe | ";
if ( value & PipelineStageFlagBits2KHR::eDrawIndirect )
@@ -19348,24 +19300,24 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & PipelineStageFlagBits2KHR::eVideoEncode )
result += "VideoEncode | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & PipelineStageFlagBits2KHR::eTransformFeedbackExt )
- result += "TransformFeedbackExt | ";
- if ( value & PipelineStageFlagBits2KHR::eConditionalRenderingExt )
- result += "ConditionalRenderingExt | ";
- if ( value & PipelineStageFlagBits2KHR::eCommandPreprocessNv )
- result += "CommandPreprocessNv | ";
+ if ( value & PipelineStageFlagBits2KHR::eTransformFeedbackEXT )
+ result += "TransformFeedbackEXT | ";
+ if ( value & PipelineStageFlagBits2KHR::eConditionalRenderingEXT )
+ result += "ConditionalRenderingEXT | ";
+ if ( value & PipelineStageFlagBits2KHR::eCommandPreprocessNV )
+ result += "CommandPreprocessNV | ";
if ( value & PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment )
result += "FragmentShadingRateAttachment | ";
if ( value & PipelineStageFlagBits2KHR::eAccelerationStructureBuild )
result += "AccelerationStructureBuild | ";
if ( value & PipelineStageFlagBits2KHR::eRayTracingShader )
result += "RayTracingShader | ";
- if ( value & PipelineStageFlagBits2KHR::eFragmentDensityProcessExt )
- result += "FragmentDensityProcessExt | ";
- if ( value & PipelineStageFlagBits2KHR::eTaskShaderNv )
- result += "TaskShaderNv | ";
- if ( value & PipelineStageFlagBits2KHR::eMeshShaderNv )
- result += "MeshShaderNv | ";
+ if ( value & PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT )
+ result += "FragmentDensityProcessEXT | ";
+ if ( value & PipelineStageFlagBits2KHR::eTaskShaderNV )
+ result += "TaskShaderNV | ";
+ if ( value & PipelineStageFlagBits2KHR::eMeshShaderNV )
+ result += "MeshShaderNV | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -19392,17 +19344,17 @@ namespace VULKAN_HPP_NAMESPACE
| VkFlags64( AccessFlagBits2KHR::eVideoDecodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoDecodeWrite ) |
VkFlags64( AccessFlagBits2KHR::eVideoEncodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoEncodeWrite )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( AccessFlagBits2KHR::eTransformFeedbackWriteExt ) |
- VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterReadExt ) |
- VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt ) |
- VkFlags64( AccessFlagBits2KHR::eConditionalRenderingReadExt ) |
- VkFlags64( AccessFlagBits2KHR::eCommandPreprocessReadNv ) |
- VkFlags64( AccessFlagBits2KHR::eCommandPreprocessWriteNv ) |
+ | VkFlags64( AccessFlagBits2KHR::eTransformFeedbackWriteEXT ) |
+ VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT ) |
+ VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT ) |
+ VkFlags64( AccessFlagBits2KHR::eConditionalRenderingReadEXT ) |
+ VkFlags64( AccessFlagBits2KHR::eCommandPreprocessReadNV ) |
+ VkFlags64( AccessFlagBits2KHR::eCommandPreprocessWriteNV ) |
VkFlags64( AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) |
VkFlags64( AccessFlagBits2KHR::eAccelerationStructureRead ) |
VkFlags64( AccessFlagBits2KHR::eAccelerationStructureWrite ) |
- VkFlags64( AccessFlagBits2KHR::eFragmentDensityMapReadExt ) |
- VkFlags64( AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt )
+ VkFlags64( AccessFlagBits2KHR::eFragmentDensityMapReadEXT ) |
+ VkFlags64( AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT )
};
};
@@ -19433,8 +19385,8 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & AccessFlagBits2KHR::eIndirectCommandRead )
result += "IndirectCommandRead | ";
if ( value & AccessFlagBits2KHR::eIndexRead )
@@ -19491,28 +19443,28 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & AccessFlagBits2KHR::eVideoEncodeWrite )
result += "VideoEncodeWrite | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & AccessFlagBits2KHR::eTransformFeedbackWriteExt )
- result += "TransformFeedbackWriteExt | ";
- if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterReadExt )
- result += "TransformFeedbackCounterReadExt | ";
- if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterWriteExt )
- result += "TransformFeedbackCounterWriteExt | ";
- if ( value & AccessFlagBits2KHR::eConditionalRenderingReadExt )
- result += "ConditionalRenderingReadExt | ";
- if ( value & AccessFlagBits2KHR::eCommandPreprocessReadNv )
- result += "CommandPreprocessReadNv | ";
- if ( value & AccessFlagBits2KHR::eCommandPreprocessWriteNv )
- result += "CommandPreprocessWriteNv | ";
+ if ( value & AccessFlagBits2KHR::eTransformFeedbackWriteEXT )
+ result += "TransformFeedbackWriteEXT | ";
+ if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT )
+ result += "TransformFeedbackCounterReadEXT | ";
+ if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT )
+ result += "TransformFeedbackCounterWriteEXT | ";
+ if ( value & AccessFlagBits2KHR::eConditionalRenderingReadEXT )
+ result += "ConditionalRenderingReadEXT | ";
+ if ( value & AccessFlagBits2KHR::eCommandPreprocessReadNV )
+ result += "CommandPreprocessReadNV | ";
+ if ( value & AccessFlagBits2KHR::eCommandPreprocessWriteNV )
+ result += "CommandPreprocessWriteNV | ";
if ( value & AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead )
result += "FragmentShadingRateAttachmentRead | ";
if ( value & AccessFlagBits2KHR::eAccelerationStructureRead )
result += "AccelerationStructureRead | ";
if ( value & AccessFlagBits2KHR::eAccelerationStructureWrite )
result += "AccelerationStructureWrite | ";
- if ( value & AccessFlagBits2KHR::eFragmentDensityMapReadExt )
- result += "FragmentDensityMapReadExt | ";
- if ( value & AccessFlagBits2KHR::eColorAttachmentReadNoncoherentExt )
- result += "ColorAttachmentReadNoncoherentExt | ";
+ if ( value & AccessFlagBits2KHR::eFragmentDensityMapReadEXT )
+ result += "FragmentDensityMapReadEXT | ";
+ if ( value & AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT )
+ result += "ColorAttachmentReadNoncoherentEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
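
These synchronization2 hunks fix the vendor-suffix capitalization of the PipelineStageFlagBits2KHR and AccessFlagBits2KHR enumerators (…Ext/…Nv become …EXT/…NV) in both the FlagTraits definitions and the to_string helpers. A hedged sketch written against the new names, assuming the default vk namespace:

    // Hypothetical illustration only; not part of the commit.
    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
      // eTransformFeedbackExt / eTaskShaderNv no longer exist; the suffix is now fully capitalized.
      vk::PipelineStageFlags2KHR stages = vk::PipelineStageFlagBits2KHR::eTransformFeedbackEXT |
                                          vk::PipelineStageFlagBits2KHR::eTaskShaderNV;
      vk::AccessFlags2KHR        access = vk::AccessFlagBits2KHR::eTransformFeedbackWriteEXT;
      std::cout << vk::to_string( stages ) << ' ' << vk::to_string( access ) << '\n';
    }
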
@@ -19554,15 +19506,14 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !value )
return "{}";
- std::string result;
+ std::string result;
if ( value & SubmitFlagBitsKHR::eProtected )
result += "Protected | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
-
//=== VK_EXT_directfb_surface ===
enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags
@@ -19583,7 +19534,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
-
//=== VK_QNX_screen_surface ===
enum class ScreenSurfaceCreateFlagBitsQNX : VkFlags
@@ -29168,7 +29118,7 @@ namespace VULKAN_HPP_NAMESPACE
struct CheckpointData2NV
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2Nv;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {},
@@ -29219,7 +29169,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2Nv;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage = {};
void * pCheckpointMarker = {};
@@ -29229,7 +29179,7 @@ namespace VULKAN_HPP_NAMESPACE
static_assert( std::is_standard_layout<CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCheckpointData2Nv>
+ struct CppType<StructureType, StructureType::eCheckpointData2NV>
{
using Type = CheckpointData2NV;
};
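
The CheckpointData2NV hunks apply the same capitalization fix to the structure-type constant (eCheckpointData2Nv becomes eCheckpointData2NV). For illustration only, assuming the default vk namespace and struct constructors enabled (the default), the default-constructed wrapper reports the renamed sType:

    // Hypothetical illustration only; not part of the commit.
    #include <vulkan/vulkan.hpp>
    #include <cassert>

    int main()
    {
      vk::CheckpointData2NV data;   // sType is pre-filled by the wrapper
      assert( data.sType == vk::StructureType::eCheckpointData2NV );
    }
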
@@ -37501,6 +37451,8 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
+ //=== VK_VERSION_1_0 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
@@ -37512,101 +37464,90 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ typename ResultValueType<void>::type
+ reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- uint32_t index,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewport( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const RenderPassBeginInfo & renderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewport( uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setScissor( uint32_t firstScissor,
+ uint32_t scissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
- const SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setScissor( uint32_t firstScissor,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setLineWidth( float lineWidth,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
- const SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void setDepthBias( float depthBiasConstantFactor,
+ float depthBiasClamp,
+ float depthBiasSlopeFactor,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setBlendConstants( const float blendConstants[4],
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void setDepthBounds( float minDepthBounds,
+ float maxDepthBounds,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ uint32_t compareMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ uint32_t writeMask,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ uint32_t reference,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
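
The hunks above regroup vk::CommandBuffer's member declarations by feature level, starting with a //=== VK_VERSION_1_0 === block for begin/end/reset and the core state, draw, and copy commands. A minimal recording sketch against those core members, assuming cmd, pipeline, and renderPassInfo are valid objects created elsewhere and that exceptions and enhanced mode are enabled (the defaults):

    // Hypothetical illustration only; not part of the commit.
    #include <vulkan/vulkan.hpp>

    void recordTriangle( vk::CommandBuffer cmd, vk::Pipeline pipeline,
                         vk::RenderPassBeginInfo const & renderPassInfo )
    {
      cmd.begin( vk::CommandBufferBeginInfo{} );   // enhanced-mode overload, throws vk::SystemError on failure
      cmd.beginRenderPass( renderPassInfo, vk::SubpassContents::eInline );
      cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
      cmd.setViewport( 0, vk::Viewport{ 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f } );
      cmd.setScissor( 0, vk::Rect2D{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 640, 480 } } );
      cmd.draw( 3, 1, 0, 0 );
      cmd.endRenderPass();
      cmd.end();
    }
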
@@ -37634,42 +37575,6 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t groupIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindVertexBuffers( uint32_t firstBinding,
uint32_t bindingCount,
const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
@@ -37685,22 +37590,75 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers2EXT( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void draw( uint32_t vertexCount,
+ uint32_t instanceCount,
+ uint32_t firstVertex,
+ uint32_t firstInstance,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexed( uint32_t indexCount,
+ uint32_t instanceCount,
+ uint32_t firstIndex,
+ int32_t vertexOffset,
+ uint32_t firstInstance,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatch( uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers2EXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -37724,83 +37682,57 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresIndirectKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
- const uint32_t * pIndirectStrides,
- const uint32_t * const * ppMaxPrimitiveCounts,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
+ const void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ ArrayProxy<const T> const & data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( uint32_t attachmentCount,
- const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
- uint32_t rectCount,
- const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ uint32_t data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
@@ -37836,176 +37768,238 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void clearAttachments( uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
+ uint32_t rectCount,
+ const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents( uint32_t eventCount,
+ const VULKAN_HPP_NAMESPACE::Event * pEvents,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ uint32_t memoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
+ void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ uint32_t size,
+ const void * pValues,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ ArrayProxy<const T> const & values,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void executeCommands( uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_1 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void setDeviceMask( uint32_t deviceMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ void dispatchBase( uint32_t baseGroupX,
+ uint32_t baseGroupY,
+ uint32_t baseGroupZ,
+ uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_VERSION_1_2 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
+ void
+ drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
+ const SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
+ const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2( const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_debug_marker ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
@@ -38028,103 +38022,115 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatch( uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBase( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBaseKHR( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_decode_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void draw( uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_EXT_transform_feedback ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexed( uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT(
+ uint32_t firstBinding,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
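// Minimal usage sketch for the VK_EXT_transform_feedback ArrayProxy overloads declared above.
// Assumptions (not part of this patch): `cmd` is a vk::CommandBuffer in the recording state,
// `xfbBuffer` and `counterBuffer` were created with the transform-feedback usage flags, and the
// extension is enabled on the device.
vk::DeviceSize offset = 0;
cmd.bindTransformFeedbackBuffersEXT( 0, xfbBuffer, offset );   // omitting `sizes` binds the range from `offset` to the end of the buffer
cmd.beginTransformFeedbackEXT( 0, counterBuffer, offset );
// ... draw calls whose vertex output is captured ...
cmd.endTransformFeedbackEXT( 0, counterBuffer, offset );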
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+ uint32_t index,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ uint32_t index,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
@@ -38136,26 +38142,21 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t vertexStride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NVX_binary_import ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
+ void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_AMD_draw_indirect_count ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
@@ -38164,135 +38165,151 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+ //=== VK_KHR_device_group ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDeviceMaskKHR( uint32_t deviceMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksNV( uint32_t taskCount,
- uint32_t firstTask,
+ void dispatchBaseKHR( uint32_t baseGroupX,
+ uint32_t baseGroupY,
+ uint32_t baseGroupZ,
+ uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_push_descriptor ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
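// Sketch of the ArrayProxy overload of pushDescriptorSetKHR declared above. `cmd`,
// `pipelineLayout`, and `uniformBuffer` are assumed handles, and VK_KHR_push_descriptor is
// assumed to be enabled; descriptor set 0 of the layout is assumed to use push descriptors.
vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
vk::WriteDescriptorSet   write;
write.dstBinding      = 0;
write.descriptorCount = 1;
write.descriptorType  = vk::DescriptorType::eUniformBuffer;
write.pBufferInfo     = &bufferInfo;
cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0 /*set*/, write );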
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ const void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_EXT_conditional_rendering ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ void beginConditionalRenderingEXT(
+ const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- uint32_t index,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+
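// VK_EXT_conditional_rendering sketch using the enhanced-mode overload above. Assumed:
// `cmd` is recording and `predicateBuffer` holds a 32-bit predicate at offset 0 and was
// created with the conditional-rendering usage bit.
vk::ConditionalRenderingBeginInfoEXT conditionalBegin( predicateBuffer, 0 );
cmd.beginConditionalRenderingEXT( conditionalBegin );
// ... draws that are skipped when the predicate reads as zero ...
cmd.endConditionalRenderingEXT();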
+ //=== VK_NV_clip_space_w_scaling ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWScalingNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWScalingNV( uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_discard_rectangles ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ uint32_t discardRectangleCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
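// Sketch of the ArrayProxy overload of setDiscardRectangleEXT above; `cmd` and `extent` are
// assumed to exist and VK_EXT_discard_rectangles is assumed to be enabled, with the pipeline
// declaring the discard-rectangle dynamic state.
vk::Rect2D discardRect( vk::Offset2D( 0, 0 ), extent );
cmd.setDiscardRectangleEXT( 0, discardRect );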
+ //=== VK_KHR_create_renderpass2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
+ const SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
+ const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
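// Sketch of the VK_KHR_create_renderpass2 calls grouped above. `cmd`, `renderPass`,
// `framebuffer`, and `extent` are assumed to come from elsewhere in the application, with
// `renderPass` assumed to have been created (via vkCreateRenderPass2KHR) with two subpasses.
vk::ClearValue          clearColor( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );
vk::RenderPassBeginInfo renderPassBegin;
renderPassBegin.renderPass      = renderPass;
renderPassBegin.framebuffer     = framebuffer;
renderPassBegin.renderArea      = vk::Rect2D( { 0, 0 }, extent );
renderPassBegin.clearValueCount = 1;
renderPassBegin.pClearValues    = &clearColor;
vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
vk::SubpassEndInfo   subpassEnd;
cmd.beginRenderPass2KHR( renderPassBegin, subpassBegin );
// ... record subpass 0 ...
cmd.nextSubpass2KHR( subpassBegin, subpassEnd );
// ... record subpass 1 ...
cmd.endRenderPass2KHR( subpassEnd );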
+ //=== VK_EXT_debug_utils ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void
+ beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void
+ beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- uint32_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
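// VK_EXT_debug_utils label sketch for the begin/end pair above; `cmd` is an assumed command
// buffer and the debug-utils extension is assumed to be enabled on the instance.
vk::DebugUtilsLabelEXT label;
label.pLabelName = "shadow pass";
cmd.beginDebugUtilsLabelEXT( label );
// ... commands grouped under the label in debugging tools ...
cmd.endDebugUtilsLabelEXT();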
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
@@ -38305,166 +38322,125 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_EXT_sample_locations ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
- const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
- const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_acceleration_structure ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresKHR(
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresIndirectKHR(
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
+ const uint32_t * pIndirectStrides,
+ const uint32_t * const * ppMaxPrimitiveCounts,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresIndirectKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ ArrayProxy<const uint32_t> const & indirectStrides,
+ ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void * pValues,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureToMemoryKHR(
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const T> const & values,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyMemoryToAccelerationStructureKHR(
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ void writeAccelerationStructuresPropertiesKHR(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_shading_rate_image ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void
+ bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV(
+ uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setBlendConstants( const float blendConstants[4],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCheckpointNV( const void * pCheckpointMarker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
uint32_t customSampleOrderCount,
@@ -38478,110 +38454,186 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_ray_tracing ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setColorWriteEnableEXT( uint32_t attachmentCount,
- const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBias( float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
+ VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
+ VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesNV(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesNV(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_draw_indirect_count ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBounds( float minDepthBounds,
- float maxDepthBounds,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+ void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+ //=== VK_AMD_buffer_marker ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+ void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ uint32_t marker,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_mesh_shader ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawMeshTasksNV( uint32_t taskCount,
+ uint32_t firstTask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMask( uint32_t deviceMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMaskKHR( uint32_t deviceMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_scissor_exclusive ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ uint32_t exclusiveScissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_device_diagnostic_checkpoints ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCheckpointNV( const void * pCheckpointMarker,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_INTEL_performance_query ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- const DependencyInfoKHR & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_fragment_shading_rate ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
@@ -38596,224 +38648,267 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_EXT_line_rasterization ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setLineStippleEXT( uint32_t lineStippleFactor,
uint16_t lineStipplePattern,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_EXT_extended_dynamic_state ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineWidth( float lineWidth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- setPatchControlPointsEXT( uint32_t patchControlPoints,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setViewportWithCountEXT( uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void
+ setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setScissorWithCountEXT( uint32_t scissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void
+ setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindVertexBuffers2EXT( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void bindVertexBuffers2EXT(
+ uint32_t firstBinding,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void
+ setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+ setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+ void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ VULKAN_HPP_NAMESPACE::StencilOp failOp,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_NV_device_generated_commands ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const GeneratedCommandsInfoNV & generatedCommandsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t groupIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_encode_queue ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_KHR_synchronization2 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ const DependencyInfoKHR & dependencyInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor,
- uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents2KHR( uint32_t eventCount,
+ const VULKAN_HPP_NAMESPACE::Event * pEvents,
+ const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setScissorWithCountEXT( uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+ void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t compareMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- VULKAN_HPP_NAMESPACE::StencilOp failOp,
- VULKAN_HPP_NAMESPACE::StencilOp passOp,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ uint32_t marker,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t reference,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_fragment_shading_rate_enums ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t writeMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_KHR_copy_commands2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
- uint32_t vertexAttributeDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+ void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
+ void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+ void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setViewportWithCountEXT( uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_ray_tracing_pipeline ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
@@ -38836,154 +38931,81 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
- VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
- VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
- const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- ArrayProxy<const T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( uint32_t eventCount,
- const VULKAN_HPP_NAMESPACE::Event * pEvents,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents2KHR( uint32_t eventCount,
- const VULKAN_HPP_NAMESPACE::Event * pEvents,
- const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_vertex_input_dynamic_state ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+ uint32_t vertexAttributeDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
+ void setVertexInputEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_extended_dynamic_state2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker,
+ void
+ setPatchControlPointsEXT( uint32_t patchControlPoints,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_color_write_enable ===
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ void
+ setColorWriteEnableEXT( uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void
+ setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
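
A minimal sketch of how the enhanced-mode CommandBuffer overloads grouped above (VK_KHR_synchronization2, VK_KHR_copy_commands2) are typically called, assuming `cmd` and `event` are valid handles created elsewhere, the relevant extensions are enabled, and the default dispatcher has been initialized:

#include <vulkan/vulkan.hpp>

// Assumed inputs: cmd (vk::CommandBuffer) and event (vk::Event) created elsewhere.
void recordSync2Sketch( vk::CommandBuffer cmd, vk::Event event )
{
  vk::MemoryBarrier2KHR barrier;                 // stage/access masks left at defaults for brevity
  vk::DependencyInfoKHR dependencyInfo;
  dependencyInfo.memoryBarrierCount = 1;
  dependencyInfo.pMemoryBarriers    = &barrier;

  cmd.setEvent2KHR( event, dependencyInfo );     // reference overload declared above
  cmd.pipelineBarrier2KHR( dependencyInfo );     // one DependencyInfoKHR instead of separate barrier arrays
}
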
@@ -54116,51 +54138,30 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getCheckpointData2NV( uint32_t * pCheckpointDataCount,
- VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CheckpointData2NVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_0 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getCheckpointDataNV( uint32_t * pCheckpointDataCount,
- VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ submit( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CheckpointDataNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -54177,6 +54178,31 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_swapchain ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_debug_utils ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void
+ beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void
+ beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
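
A similar sketch for the Queue members declared above (VK_KHR_swapchain, VK_EXT_debug_utils), assuming `queue` and a fully populated `presentInfo` are supplied by the application and both extensions are enabled:

#include <vulkan/vulkan.hpp>

// Assumed inputs: queue (vk::Queue) and a populated vk::PresentInfoKHR.
vk::Result presentWithLabel( vk::Queue queue, const vk::PresentInfoKHR & presentInfo )
{
  vk::DebugUtilsLabelEXT label;
  label.pLabelName = "present";                          // label text chosen for illustration
  queue.beginDebugUtilsLabelEXT( label );                // reference overload declared above
  vk::Result result = queue.presentKHR( presentInfo );   // nodiscard Result, per the declaration above
  queue.endDebugUtilsLabelEXT();
  return result;
}
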
@@ -54191,16 +54217,28 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_device_diagnostic_checkpoints ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+ VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = CheckpointDataNVAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_INTEL_performance_query ===
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL(
@@ -54209,23 +54247,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- submit( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_synchronization2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
@@ -54241,14 +54267,22 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void getCheckpointData2NV( uint32_t * pCheckpointDataCount,
+ VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = CheckpointData2NVAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
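
And a sketch of the core VK_VERSION_1_0 Queue members shown above, assuming the default vulkan.hpp configuration (exceptions enabled) and handles created elsewhere in the application:

#include <vulkan/vulkan.hpp>

// Assumed inputs: queue, cmd and fence are valid handles created elsewhere.
void submitAndWait( vk::Queue queue, vk::CommandBuffer cmd, vk::Fence fence )
{
  vk::SubmitInfo submitInfo;
  submitInfo.commandBufferCount = 1;
  submitInfo.pCommandBuffers    = &cmd;

  queue.submit( submitInfo, fence );   // ArrayProxy overload; throws vk::SystemError on failure
  queue.waitIdle();                    // enhanced-mode overload declared above

  // With VK_NV_device_diagnostic_checkpoints enabled, the vector-returning overload applies:
  // std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();
}
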
@@ -58688,167 +58722,53 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ //=== VK_VERSION_1_0 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
- uint32_t * pImageIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ PFN_vkVoidFunction
+ getProcAddr( const char * pName,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<uint32_t>
- acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ PFN_vkVoidFunction
+ getProcAddr( const std::string & name,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- uint32_t * pImageIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<uint32_t>
- acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getQueue( uint32_t queueFamilyIndex,
+ uint32_t queueIndex,
+ VULKAN_HPP_NAMESPACE::Queue * pQueue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
- acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
- acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
+ getQueue( uint32_t queueFamilyIndex,
+ uint32_t queueIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
- allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CommandBufferAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
- allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
- CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
- allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>,
- typename B = CommandBufferAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
- allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
- CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
- allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DescriptorSetAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
- allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
- DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
- allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>,
- typename B = DescriptorSetAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
- allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
- DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
@@ -58871,651 +58791,543 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(
- uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindBufferMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- bindBufferMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+ void ** ppData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void *>::type
+ mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- bindImageMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ flushMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindImageMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(
+ uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
+ getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
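// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// A minimal example of the unmapMemory / flushMappedMemoryRanges / getMemoryCommitment
// declarations above, assuming the default enhanced mode with exceptions enabled.
// `device` and the host-visible `memory` allocation are placeholder names, and
// mapMemory is assumed to be the matching overload declared elsewhere in this header.
#include <cstring>
#include <vulkan/vulkan.hpp>

void writeHostVisible( vk::Device device, vk::DeviceMemory memory, const void * src, size_t bytes )
{
  void * mapped = device.mapMemory( memory, 0, VK_WHOLE_SIZE );     // throws vk::SystemError on failure
  std::memcpy( mapped, src, bytes );
  vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  device.flushMappedMemoryRanges( range );                          // one element converts to ArrayProxy
  device.unmapMemory( memory );
  vk::DeviceSize committed = device.getMemoryCommitment( memory );  // enhanced overload returns by value
  (void)committed;
}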
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t shader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t shader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(
+ VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(
+ VULKAN_HPP_NAMESPACE::Image image,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
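// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// Shows the enhanced-mode bindBufferMemory / bindImageMemory and the by-value
// getBufferMemoryRequirements / getImageMemoryRequirements overloads declared above.
// `device`, `buffer`, `image` and the already-allocated memory handles are placeholders.
#include <vulkan/vulkan.hpp>

void bindResources( vk::Device device, vk::Buffer buffer, vk::Image image,
                    vk::DeviceMemory bufferMemory, vk::DeviceMemory imageMemory )
{
  vk::MemoryRequirements bufferReqs = device.getBufferMemoryRequirements( buffer );
  vk::MemoryRequirements imageReqs  = device.getImageMemoryRequirements( image );
  (void)bufferReqs; (void)imageReqs;  // typically used to pick a memory type and allocation size
  device.bindBufferMemory( buffer, bufferMemory, 0 );  // returns void in enhanced mode, throws on failure
  device.bindImageMemory( image, imageMemory, 0 );
}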
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ void getImageSparseMemoryRequirements(
+ VULKAN_HPP_NAMESPACE::Image image,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SparseImageMemoryRequirementsAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
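// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// The enhanced getImageSparseMemoryRequirements overload declared above wraps the usual
// count-then-fill enumeration and returns a std::vector. `device` and a sparse-resident
// `image` are placeholder names.
#include <vector>
#include <vulkan/vulkan.hpp>

size_t countSparseRequirements( vk::Device device, vk::Image image )
{
  std::vector<vk::SparseImageMemoryRequirements> reqs =
    device.getImageSparseMemoryRequirements( image );
  return reqs.size();
}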
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
- createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ createFence( const FenceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
- createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ createFenceUnique( const FenceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
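// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// The two createFence flavours declared above: the plain overload returns a vk::Fence the
// caller destroys, while createFenceUnique wraps it in a UniqueHandle. Assumes default
// enhanced mode with exceptions; `device` is a placeholder.
#include <vulkan/vulkan.hpp>

void fenceCreation( vk::Device device )
{
  vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
  device.destroyFence( fence );

  vk::UniqueFence autoFence = device.createFenceUnique( {} );  // destroyed when it goes out of scope
}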
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(
- const AccelerationStructureCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
- createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
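// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// The destroy( vk::Fence ) overloads declared above are the generic spelling of
// destroyFence(); both end up in vkDestroyFence. Placeholder handles assumed.
#include <vulkan/vulkan.hpp>

void destroyFenceBothWays( vk::Device device, vk::Fence a, vk::Fence b )
{
  device.destroyFence( a );  // named form
  device.destroy( b );       // generic overload, resolved by the handle type
}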
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ resetFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
- createBuffer( const BufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ typename ResultValueType<void>::type
+ resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
- createBufferUnique( const BufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD Result
+ getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::BufferView * pView,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ waitForFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
- createBufferView( const BufferViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
- createBufferViewUnique( const BufferViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
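// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// Enhanced-mode fence synchronisation with the declarations above: a single vk::Fence
// converts implicitly to ArrayProxy<const vk::Fence>, and waitForFences / getFenceStatus
// return vk::Result (eSuccess / eTimeout / eNotReady) instead of throwing for those codes.
// Placeholder handles and the default dispatcher are assumed.
#include <cstdint>
#include <vulkan/vulkan.hpp>

bool waitThenRecycle( vk::Device device, vk::Fence fence, uint64_t timeoutNs )
{
  vk::Result wait = device.waitForFences( fence, VK_TRUE, timeoutNs );
  if ( wait == vk::Result::eTimeout )
    return false;
  device.resetFences( fence );  // back to the unsignaled state
  return device.getFenceStatus( fence ) == vk::Result::eNotReady;
}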
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
- createCommandPool( const CommandPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
+ createSemaphore( const SemaphoreCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
- createCommandPoolUnique( const CommandPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
+ createSemaphoreUnique( const SemaphoreCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
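// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// createSemaphore / createSemaphoreUnique as declared above; a binary semaphore needs no
// non-default creation parameters. Placeholder `device`, enhanced mode assumed.
#include <vulkan/vulkan.hpp>

vk::UniqueSemaphore makeBinarySemaphore( vk::Device device )
{
  return device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
}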
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
- createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Event * pEvent,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
- createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
+ createEvent( const EventCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
- createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
+ createEventUnique( const EventCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
- createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
- createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroy( VULKAN_HPP_NAMESPACE::Event event,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
- createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD Result
+ getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
- createDescriptorPool( const DescriptorPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
- createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD Result
+ resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type
+ resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
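// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// Host-side event signalling with the createEvent / setEvent / getEventStatus / resetEvent
// declarations above. getEventStatus reports eEventSet or eEventReset; setEvent and
// resetEvent return void in enhanced mode and throw on failure. Placeholder `device`.
#include <vulkan/vulkan.hpp>

void toggleEvent( vk::Device device )
{
  vk::UniqueEvent event = device.createEventUnique( {} );
  device.setEvent( event.get() );
  bool isSet = ( device.getEventStatus( event.get() ) == vk::Result::eEventSet );
  (void)isSet;
  device.resetEvent( event.get() );
}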
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
- createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+ createQueryPool( const QueryPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
- createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
+ createQueryPoolUnique( const QueryPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD Result
+ getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ void * pData,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ ArrayProxy<T> const & data,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T,
+ typename Allocator = std::allocator<T>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<T, Allocator>>
+ getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<T>
+ getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
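// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// Reading timestamps back with the createQueryPool / getQueryPoolResults<T> declarations
// above. The templated overload returns ResultValue<std::vector<T>>; with eWait the result
// is normally eSuccess, otherwise it may be eNotReady. Assumes the queries were written by
// command buffers elsewhere; names are placeholders.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint64_t> readTimestamps( vk::Device device, uint32_t queryCount )
{
  vk::QueryPool pool =
    device.createQueryPool( vk::QueryPoolCreateInfo( {}, vk::QueryType::eTimestamp, queryCount ) );

  // ... submit work that writes the timestamps, then:
  vk::ResultValue<std::vector<uint64_t>> results = device.getQueryPoolResults<uint64_t>(
    pool, 0, queryCount, queryCount * sizeof( uint64_t ), sizeof( uint64_t ),
    vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );

  device.destroyQueryPool( pool );
  return results.value;
}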
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Event * pEvent,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
- createEvent( const EventCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
+ createBuffer( const BufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
- createEventUnique( const EventCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
+ createBufferUnique( const BufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
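// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// createBuffer / createBufferUnique as declared above; the 64 KiB uniform buffer is purely
// an example value. Placeholder `device`, enhanced mode assumed.
#include <vulkan/vulkan.hpp>

vk::UniqueBuffer makeUniformBuffer( vk::Device device )
{
  vk::BufferCreateInfo createInfo( {}, 64 * 1024, vk::BufferUsageFlagBits::eUniformBuffer,
                                   vk::SharingMode::eExclusive );
  return device.createBufferUnique( createInfo );
}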
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- createFence( const FenceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- createFenceUnique( const FenceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::BufferView * pView,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
- createFramebuffer( const FramebufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
+ createBufferView( const BufferViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
- createFramebufferUnique( const FramebufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
+ createBufferViewUnique( const BufferViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
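// --- Illustrative usage sketch (editor's addition, not part of the diff) ---
// createBufferView / createBufferViewUnique as declared above; the buffer is assumed to
// have been created with a texel-buffer usage flag and bound to memory already.
#include <vulkan/vulkan.hpp>

vk::UniqueBufferView viewWholeBuffer( vk::Device device, vk::Buffer texelBuffer )
{
  vk::BufferViewCreateInfo createInfo( {}, texelBuffer, vk::Format::eR32Sfloat, 0, VK_WHOLE_SIZE );
  return device.createBufferViewUnique( createInfo );
}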
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
- createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void
+ destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
@@ -59540,6 +59352,42 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::Image image,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout(
+ VULKAN_HPP_NAMESPACE::Image image,
+ const ImageSubresource & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
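      // Editor's usage sketch (not part of the commit): the enhanced-mode overload above returns
      // the SubresourceLayout by value instead of writing through an output pointer. Assumes a
      // linearly tiled vk::Image `image`; names are illustrative.
      inline vk::DeviceSize rowPitchOfBaseMip( vk::Device device, vk::Image image )
      {
        vk::ImageSubresource  subresource( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*arrayLayer*/ );
        vk::SubresourceLayout layout = device.getImageSubresourceLayout( image, subresource );
        return layout.rowPitch;
      }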
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -59563,31 +59411,74 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
- createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
+ createShaderModule( const ShaderModuleCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
- createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
+ createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
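      // Editor's usage sketch (not part of the commit): createShaderModuleUnique, declared above,
      // yields a vk::UniqueShaderModule that destroys itself on scope exit. Assumes `spirv` holds
      // valid SPIR-V words, <vector> is included, and exceptions are enabled; names are illustrative.
      inline vk::UniqueShaderModule makeShaderModule( vk::Device device, std::vector<uint32_t> const & spirv )
      {
        vk::ShaderModuleCreateInfo createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
        return device.createShaderModuleUnique( createInfo );
      }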
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void
+ destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -59611,118 +59502,103 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
- createPipelineLayout( const PipelineLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
- createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT(
- const PrivateDataSlotCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
- createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Uint8_tAllocator & uint8_tAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
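      // Editor's usage sketch (not part of the commit): the enhanced-mode overload above hides the
      // usual size-query/fill two-step and returns the cache blob directly. Assumes exceptions are
      // enabled; names are illustrative.
      inline std::vector<uint8_t> snapshotPipelineCache( vk::Device device, vk::PipelineCache cache )
      {
        return device.getPipelineCacheData( cache );   // blob can be persisted and fed back at cache creation
      }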
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
- createQueryPool( const QueryPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ VULKAN_HPP_NODISCARD Result
+ mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
- createQueryPoolUnique( const QueryPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
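      // Editor's usage sketch (not part of the commit): the ArrayProxy overload above accepts a
      // braced list or container of source caches; names are illustrative.
      inline void mergeCaches( vk::Device device, vk::PipelineCache dst, vk::PipelineCache a, vk::PipelineCache b )
      {
        device.mergePipelineCaches( dst, { a, b } );   // returns void when exceptions are enabled
      }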
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator = std::allocator<Pipeline>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename PipelineAllocator = std::allocator<Pipeline>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B = PipelineAllocator,
typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ createGraphicsPipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
@@ -59731,62 +59607,60 @@ namespace VULKAN_HPP_NAMESPACE
typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
+ createGraphicsPipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
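      // Editor's usage sketch (not part of the commit): createGraphicsPipelineUnique, declared above,
      // returns a ResultValue so callers can inspect non-error success codes before taking the handle.
      // Assumes a fully populated create info and enabled exceptions; names are illustrative.
      inline vk::UniquePipeline buildGraphicsPipeline( vk::Device                              device,
                                                       vk::PipelineCache                       cache,
                                                       vk::GraphicsPipelineCreateInfo const &  createInfo )
      {
        auto pipeline = device.createGraphicsPipelineUnique( cache, createInfo );
        // pipeline.result carries the vk::Result, pipeline.value the unique handle
        return std::move( pipeline.value );
      }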
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator = std::allocator<Pipeline>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename PipelineAllocator = std::allocator<Pipeline>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B = PipelineAllocator,
typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ createComputePipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
@@ -59795,85 +59669,61 @@ namespace VULKAN_HPP_NAMESPACE
typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
+ createComputePipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
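      // Editor's usage sketch (not part of the commit): the ArrayProxy overload of
      // createComputePipelines above builds a batch of pipelines in one call; names are illustrative.
      inline std::vector<vk::Pipeline> buildComputePipelines(
        vk::Device device, vk::PipelineCache cache,
        std::vector<vk::ComputePipelineCreateInfo> const & createInfos )
      {
        return device.createComputePipelines( cache, createInfos ).value;   // .result holds the vk::Result
      }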
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass( const RenderPassCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPassUnique( const RenderPassCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass2( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo,
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
+ createPipelineLayout( const PipelineLayoutCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo,
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
+ createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
@@ -59881,6 +59731,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyPipelineLayout(
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -59903,995 +59776,1016 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
- createSemaphore( const SemaphoreCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
+ createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
- createSemaphoreUnique( const SemaphoreCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
+ createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
- createShaderModule( const ShaderModuleCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
- createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroyDescriptorSetLayout(
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(
- uint32_t swapchainCount,
- const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SwapchainKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR(
- const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>,
- typename B = SwapchainKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
- createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
+ createDescriptorPool( const DescriptorPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
- createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
+ createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(
- const ValidationCacheCreateInfoEXT & createInfo,
+ void destroyDescriptorPool(
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
- createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
- createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
- createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
- createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
- createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ typename ResultValueType<void>::type
+ resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
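      // Editor's usage sketch (not part of the commit): in enhanced mode the flags parameter above
      // is defaulted, so resetting a pool is a one-liner; names are illustrative.
      inline void recycleDescriptorPool( vk::Device device, vk::DescriptorPool pool )
      {
        device.resetDescriptorPool( pool );   // implicitly frees every set allocated from `pool`
      }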
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
+ allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DescriptorSetAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
+ allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
+ DescriptorSetAllocator & descriptorSetAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>,
+ typename B = DescriptorSetAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
+ DescriptorSetAllocator & descriptorSetAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
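// Editorial usage sketch, not part of this commit: with the default configuration
// (enhanced mode, exceptions, default dispatcher) and assumed handles `device`, `pool`
// and `layout`, the enhanced overloads above could be exercised as:
//   vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &layout );
//   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
//   // RAII variant; the pool must have been created with eFreeDescriptorSet:
//   auto uniqueSets = device.allocateDescriptorSetsUnique( allocInfo );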
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type
+ freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type
+ free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void updateDescriptorSets( uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
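// Editorial usage sketch (assumed handles `device`, `set`, `buffer`): the ArrayProxy
// overload accepts a single element or an initializer list, so one descriptor write is simply:
//   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
//   vk::WriteDescriptorSet write( set, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
//   device.updateDescriptorSets( write, nullptr );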
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
+ createFramebuffer( const FramebufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
+ createFramebufferUnique( const FramebufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
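// Editorial usage sketch (assumed handles `device`, `renderPass`, `imageView` and extent values):
//   vk::FramebufferCreateInfo fbInfo( {}, renderPass, 1, &imageView, width, height, 1 );
//   vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( fbInfo );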
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass( const RenderPassCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPassUnique( const RenderPassCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
+ getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
+ createCommandPool( const CommandPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
+ createCommandPoolUnique( const CommandPoolCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
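// Editorial usage sketch (assumed `device` and `queueFamilyIndex`):
//   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
//   vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( poolInfo );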
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
- destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ typename ResultValueType<void>::type
+ resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+ allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = CommandBufferAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+ allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
+ CommandBufferAllocator & commandBufferAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>,
+ typename B = CommandBufferAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
+ CommandBufferAllocator & commandBufferAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
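// Editorial usage sketch (assumed `device` and a vk::CommandPool `pool`); the Unique variant
// returns command buffers that free themselves back to the pool on destruction:
//   vk::CommandBufferAllocateInfo cbInfo( pool, vk::CommandBufferLevel::ePrimary, 1 );
//   std::vector<vk::UniqueCommandBuffer> cmdBufs = device.allocateCommandBuffersUnique( cbInfo );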
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool(
- VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_1 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ bindBufferMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
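// Editorial usage sketch (assumed `device`, `buffer`, `memory`); the ArrayProxy overload
// also accepts a single element:
//   device.bindBufferMemory2( vk::BindBufferMemoryInfo( buffer, memory, 0 ) );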
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ bindImageMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void getGroupPeerMemoryFeatures( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures(
+ uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(
+ const ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2(
+ const ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
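// Editorial usage sketch (assumed `device`, `image`): the StructureChain overload returns the
// base structure together with chained extension structures, e.g. dedicated-allocation info:
//   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
//     vk::ImageMemoryRequirementsInfo2( image ) );
//   vk::MemoryRequirements req = chain.get<vk::MemoryRequirements2>().memoryRequirements;
//   vk::Bool32 dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;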
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
+ const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
+ const BufferMemoryRequirementsInfo2 & info,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements2(
+ const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SparseImageMemoryRequirements2Allocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2(
+ const ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
+ VULKAN_HPP_NAMESPACE::Queue * pQueue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Event event,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
+ getQueue2( const DeviceQueueInfo2 & queueInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
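// Editorial usage sketch (assumed `device`, `queueFamilyIndex`): the enhanced overload
// returns the queue directly:
//   vk::Queue queue = device.getQueue2( vk::DeviceQueueInfo2( {}, queueFamilyIndex, 0 ) );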
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(
+ const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversion(
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Image image,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplate(
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(
+ const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(
+ const DescriptorSetLayoutCreateInfo & createInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass2( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ uint64_t * pValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
+ getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ signalSemaphore( const SemaphoreSignalInfo & signalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
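// Editorial usage sketch for the Vulkan 1.2 timeline-semaphore entry points above (assumed
// `device` and a vk::Semaphore `timeline` created with a chained vk::SemaphoreTypeCreateInfo
// of type eTimeline):
//   uint64_t target = device.getSemaphoreCounterValue( timeline ) + 1;
//   device.signalSemaphore( vk::SemaphoreSignalInfo( timeline, target ) );
//   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timeline, &target );
//   vk::Result r = device.waitSemaphores( waitInfo, UINT64_MAX );  // eSuccess or eTimeout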
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress
+ getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout(
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress
+ getBufferAddress( const BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
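// Editorial usage sketch (assumed `device` and a `buffer` created with
// vk::BufferUsageFlagBits::eShaderDeviceAddress while the bufferDeviceAddress feature is enabled):
//   vk::DeviceAddress addr = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );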
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPrivateDataSlotEXT(
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
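Usage note (illustrative, not part of the generated header): the Vulkan 1.2 device entry points declared above can be driven as in the minimal sketch below. `device`, `timelineSemaphore` and `buffer` are hypothetical handles created elsewhere, and the buffer is assumed to have been created with eShaderDeviceAddress usage; with exceptions enabled, the Result-returning waitSemaphores reports eTimeout as a value rather than throwing.

#include <vulkan/vulkan.hpp>

// Hypothetical handles created elsewhere: device, timelineSemaphore, buffer.
void timelineAndAddressSketch( vk::Device device, vk::Semaphore timelineSemaphore, vk::Buffer buffer )
{
  uint64_t              waitValue = 1;
  vk::SemaphoreWaitInfo waitInfo{};
  waitInfo.semaphoreCount = 1;
  waitInfo.pSemaphores    = &timelineSemaphore;
  waitInfo.pValues        = &waitValue;
  // eTimeout is a success code here, so it is returned instead of thrown.
  vk::Result waitResult = device.waitSemaphores( waitInfo, 1000000000 /* 1 s in ns */ );

  vk::SemaphoreSignalInfo signalInfo{};
  signalInfo.semaphore = timelineSemaphore;
  signalInfo.value     = 2;
  device.signalSemaphore( signalInfo );  // throws vk::SystemError on failure

  vk::BufferDeviceAddressInfo addressInfo{};
  addressInfo.buffer = buffer;
  vk::DeviceAddress address = device.getBufferAddress( addressInfo );  // noexcept
  (void)waitResult;
  (void)address;
}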
+ //=== VK_KHR_swapchain ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
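Illustrative only: a minimal swapchain creation against the enhanced-mode overload added above, assuming a hypothetical `device`, `surface`, and values previously queried from the physical device (surface format, extent, image count).

#include <vulkan/vulkan.hpp>

// Hypothetical inputs queried elsewhere from vkGetPhysicalDeviceSurface* calls.
vk::SwapchainKHR createSwapchainSketch( vk::Device           device,
                                        vk::SurfaceKHR       surface,
                                        vk::SurfaceFormatKHR surfaceFormat,
                                        vk::Extent2D         extent,
                                        uint32_t             minImageCount )
{
  vk::SwapchainCreateInfoKHR createInfo{};
  createInfo.surface          = surface;
  createInfo.minImageCount    = minImageCount;
  createInfo.imageFormat      = surfaceFormat.format;
  createInfo.imageColorSpace  = surfaceFormat.colorSpace;
  createInfo.imageExtent      = extent;
  createInfo.imageArrayLayers = 1;
  createInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
  createInfo.imageSharingMode = vk::SharingMode::eExclusive;
  createInfo.preTransform     = vk::SurfaceTransformFlagBitsKHR::eIdentity;
  createInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
  createInfo.presentMode      = vk::PresentModeKHR::eFifo;
  createInfo.clipped          = VK_TRUE;

  // With exceptions enabled this returns the handle directly and throws vk::SystemError on error;
  // createSwapchainKHRUnique returns a UniqueHandle that destroys the swapchain automatically.
  return device.createSwapchainKHR( createInfo );
}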
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ void
+ destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pSwapchainImageCount,
+ VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+ getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename ImageAllocator = std::allocator<Image>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = ImageAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+ getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ ImageAllocator & imageAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
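For illustration, retrieving the swapchain images with the vector-returning overload declared above (hypothetical `device` and `swapchain`); the usual count/pointer two-call pattern is handled inside the enhanced-mode wrapper.

#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<vk::Image> getImagesSketch( vk::Device device, vk::SwapchainKHR swapchain )
{
  // Enhanced mode sizes the vector itself and repeats the query if the count changes in between.
  return device.getSwapchainImagesKHR( swapchain );
}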
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ uint32_t * pImageIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t>
+ acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
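A hedged sketch of the enhanced acquireNextImageKHR overload above: it returns a ResultValue<uint32_t> because eTimeout, eNotReady and eSuboptimalKHR are success codes, while error codes such as eErrorOutOfDateKHR surface as exceptions when exceptions are enabled. Handle names are hypothetical.

#include <vulkan/vulkan.hpp>
#include <cstdint>

// imageAvailable is a binary semaphore to be signaled when the image can be rendered to.
uint32_t acquireSketch( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  try
  {
    vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable );
    if ( acquired.result == vk::Result::eSuboptimalKHR )
    {
      // still presentable, but the swapchain should be recreated soon
    }
    return acquired.value;
  }
  catch ( vk::OutOfDateKHRError const & )
  {
    // recreate the swapchain and retry (omitted in this sketch)
    return UINT32_MAX;
  }
}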
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+ getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+ uint32_t * pImageIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t>
+ acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
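Illustrative device-group path using the overloads above: query the group's present capabilities, then acquire with an explicit device mask via acquireNextImage2KHR. Names are hypothetical and error handling is omitted.

#include <vulkan/vulkan.hpp>
#include <cstdint>

uint32_t acquire2Sketch( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
  (void)caps;  // caps.presentMask / caps.modes describe which devices may present

  vk::AcquireNextImageInfoKHR acquireInfo{};
  acquireInfo.swapchain  = swapchain;
  acquireInfo.timeout    = UINT64_MAX;
  acquireInfo.semaphore  = imageAvailable;
  acquireInfo.deviceMask = 1;  // physical device 0 of the group

  return device.acquireNextImage2KHR( acquireInfo ).value;  // ResultValue<uint32_t>
}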
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_display_swapchain ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(
+ uint32_t swapchainCount,
+ const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SwapchainKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR(
+ const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHRUnique(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>,
+ typename B = SwapchainKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHRUnique(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
+ createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
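A minimal sketch for the VK_KHR_display_swapchain overloads added above, assuming two fully filled SwapchainCreateInfoKHR structures targeting display surfaces that may share presentable images; the ArrayProxy parameter accepts a std::array or std::vector directly.

#include <vulkan/vulkan.hpp>
#include <array>
#include <vector>

std::vector<vk::SwapchainKHR> createSharedSketch( vk::Device                         device,
                                                  vk::SwapchainCreateInfoKHR const & first,
                                                  vk::SwapchainCreateInfoKHR const & second )
{
  std::array<vk::SwapchainCreateInfoKHR, 2> createInfos = { first, second };
  // Returns one SwapchainKHR per create info; the Unique variant wraps each in a UniqueHandle.
  return device.createSharedSwapchainsKHR( createInfos );
}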
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_debug_marker ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(
+ const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(
+ const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
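A hedged example of the VK_EXT_debug_marker naming overload above (the device extension must be enabled); the object is passed as its raw 64-bit handle value, so the vk::Buffer is converted through its C handle first.

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Label a buffer for tools such as RenderDoc; names here are purely illustrative.
void nameBufferSketch( vk::Device device, vk::Buffer buffer )
{
  vk::DebugMarkerObjectNameInfoEXT nameInfo{};
  nameInfo.objectType  = vk::DebugReportObjectTypeEXT::eBuffer;
  nameInfo.object      = uint64_t( static_cast<VkBuffer>( buffer ) );  // raw handle value
  nameInfo.pObjectName = "particle vertex buffer";
  device.debugMarkerSetObjectNameEXT( nameInfo );  // returns void when exceptions are enabled
}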
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+ VULKAN_HPP_NODISCARD Result
+ createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
+ createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
+ createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void
destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
@@ -60904,9 +60798,7 @@ namespace VULKAN_HPP_NAMESPACE
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -60917,9 +60809,84 @@ namespace VULKAN_HPP_NAMESPACE
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t * pVideoSessionMemoryRequirementsCount,
+ VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+ getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = VideoGetMemoryPropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+ getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t videoSessionBindMemoryCount,
+ const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR(
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
+ createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
+ createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VideoSessionParametersUpdateInfoKHR & updateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
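The video entry points above belong to the provisional VK_KHR_video_queue extension behind VK_ENABLE_BETA_EXTENSIONS, so any usage is subject to change; the sketch below only exercises the vector-returning memory-requirements overload with hypothetical handles.

#include <vulkan/vulkan.hpp>
#include <vector>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
// Provisional API: each returned entry describes one memory binding the session needs before use.
std::vector<vk::VideoGetMemoryPropertiesKHR> videoMemReqsSketch( vk::Device          device,
                                                                 vk::VideoSessionKHR videoSession )
{
  return device.getVideoSessionMemoryRequirementsKHR( videoSession );
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/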
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -60932,9 +60899,7 @@ namespace VULKAN_HPP_NAMESPACE
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
@@ -60947,542 +60912,565 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NVX_binary_import ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayPowerInfoEXT & displayPowerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+ createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
+ createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- flushMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
+ createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
+ createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
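A sketch of the new VK_NVX_binary_import creation overloads, assuming the member names dataSize/pData and module/pName mirror the C structures; the blob and kernel name are hypothetical inputs.

#include <vulkan/vulkan.hpp>
#include <cstddef>

// Load a CUDA module binary and look up a kernel in it by name (illustrative only).
vk::CuFunctionNVX loadCuKernelSketch( vk::Device device, const void * blob, size_t blobSize, const char * kernelName )
{
  vk::CuModuleCreateInfoNVX moduleInfo{};
  moduleInfo.dataSize = blobSize;
  moduleInfo.pData    = blob;
  vk::CuModuleNVX cuModule = device.createCuModuleNVX( moduleInfo );

  vk::CuFunctionCreateInfoNVX functionInfo{};
  functionInfo.module = cuModule;
  functionInfo.pName  = kernelName;
  return device.createCuFunctionNVX( functionInfo );
}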
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NVX_image_view_handle ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint32_t
+ getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint32_t
+ getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
+ getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
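For illustration, the VK_NVX_image_view_handle queries above can be combined as follows; the image view, sampler and descriptor type are hypothetical and chosen only to fill the info structure.

#include <vulkan/vulkan.hpp>
#include <cstdint>

void imageViewHandleSketch( vk::Device device, vk::ImageView imageView, vk::Sampler sampler )
{
  vk::ImageViewHandleInfoNVX handleInfo{};
  handleInfo.imageView      = imageView;
  handleInfo.descriptorType = vk::DescriptorType::eCombinedImageSampler;
  handleInfo.sampler        = sampler;
  uint32_t handle = device.getImageViewHandleNVX( handleInfo );  // noexcept

  vk::ImageViewAddressPropertiesNVX props = device.getImageViewAddressNVX( imageView );
  (void)handle;
  (void)props;  // props.deviceAddress / props.size describe the view's memory range
}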
+ //=== VK_AMD_shader_info ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
- const uint32_t * pMaxPrimitiveCounts,
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ size_t * pInfoSize,
+ void * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ Uint8_tAllocator & uint8_tAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
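A small example of the vector-returning VK_AMD_shader_info overload above: dumping the disassembly of a pipeline's fragment stage (pipeline handle is hypothetical; the extension must be supported and enabled).

#include <vulkan/vulkan.hpp>
#include <string>
#include <vector>

std::string shaderDisassemblySketch( vk::Device device, vk::Pipeline pipeline )
{
  std::vector<uint8_t> blob = device.getShaderInfoAMD( pipeline,
                                                       vk::ShaderStageFlagBits::eFragment,
                                                       vk::ShaderInfoTypeAMD::eDisassembly );
  return std::string( blob.begin(), blob.end() );
}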
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_external_memory_win32 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_device_group ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getAccelerationStructureAddressKHR(
- const AccelerationStructureDeviceAddressInfoKHR & info,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(
+ uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
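Illustrative use of the VK_KHR_device_group query above: check whether heap 0 of remote device 1 can be a copy destination for local device 0 (indices are placeholders for a real device group).

#include <vulkan/vulkan.hpp>

bool peerCopySupportedSketch( vk::Device device )
{
  vk::PeerMemoryFeatureFlags features =
    device.getGroupPeerMemoryFeaturesKHR( /*heapIndex=*/0, /*localDeviceIndex=*/0, /*remoteDeviceIndex=*/1 );
  return static_cast<bool>( features & vk::PeerMemoryFeatureFlagBits::eCopyDst );
}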
+ //=== VK_KHR_maintenance1 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- ArrayProxy<T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T,
- typename Allocator = std::allocator<T>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
- getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
- getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_memory_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
- const AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
- const AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID(
- const struct AHardwareBuffer * buffer,
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
- getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
+ getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddress( const BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_external_memory_fd ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressEXT( const BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
+ getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressKHR( const BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
+ getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
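Hedged sketches for the VK_KHR_external_memory_fd overloads above: the exported allocation is assumed to have been created with an ExportMemoryAllocateInfo naming eOpaqueFd, and dmaBufFd stands for an externally produced dma-buf descriptor.

#include <vulkan/vulkan.hpp>

int exportMemoryFdSketch( vk::Device device, vk::DeviceMemory memory )
{
  vk::MemoryGetFdInfoKHR getFdInfo{};
  getFdInfo.memory     = memory;
  getFdInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  return device.getMemoryFdKHR( getFdInfo );  // ownership of the fd transfers to the caller
}

vk::MemoryFdPropertiesKHR importablePropertiesSketch( vk::Device device, int dmaBufFd )
{
  // Only valid for non-opaque handle types such as eDmaBufEXT; reports which memory types can import the fd.
  return device.getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, dmaBufFd );
}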
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Buffer buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_semaphore_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
- const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
- const BufferMemoryRequirementsInfo2 & info,
+ VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(
- const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(
- const BufferMemoryRequirementsInfo2 & info,
+ VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_external_semaphore_fd ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
+ getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
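Illustrative usage sketch for the VK_KHR_external_semaphore_fd wrappers above (not part of this patch). Helper names and handles are hypothetical; both semaphores are assumed to have been created with a matching VkExportSemaphoreCreateInfo handle type and exceptions are assumed enabled.

int exportSemaphoreFd( vk::Device device, vk::Semaphore semaphore )
{
  vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  return device.getSemaphoreFdKHR( getFdInfo );
}

void importSemaphoreFd( vk::Device device, vk::Semaphore semaphore, int fd )
{
  // Ownership of fd transfers to the implementation on a successful import.
  vk::ImportSemaphoreFdInfoKHR importInfo(
    semaphore, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, fd );
  device.importSemaphoreFdKHR( importInfo );
}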
+ //=== VK_KHR_descriptor_update_template ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(
- uint32_t timestampCount,
- const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
- uint64_t * pTimestamps,
- uint64_t * pMaxDeviation,
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- ArrayProxy<uint64_t> const & timestamps,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint64_tAllocator = std::allocator<uint64_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint64_tAllocator = std::allocator<uint64_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint64_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Uint64_tAllocator & uint64_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(
- const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(
- const DescriptorSetLayoutCreateInfo & createInfo,
+ void destroyDescriptorUpdateTemplateKHR(
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
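Illustrative usage sketch for the VK_KHR_descriptor_update_template wrappers above (not part of this patch). The helper name, handles, binding number, and descriptor type are placeholders chosen for the example.

void updateWithTemplate( vk::Device device, vk::DescriptorSetLayout layout,
                         vk::DescriptorSet set, const vk::DescriptorBufferInfo & bufferInfo )
{
  // One uniform-buffer entry, read from offset 0 of the blob passed to the update call.
  vk::DescriptorUpdateTemplateEntry entry(
    0, 0, 1, vk::DescriptorType::eUniformBuffer, 0, sizeof( vk::DescriptorBufferInfo ) );
  vk::DescriptorUpdateTemplateCreateInfo createInfo(
    {}, 1, &entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, layout );
  vk::DescriptorUpdateTemplate tmpl = device.createDescriptorUpdateTemplateKHR( createInfo );
  device.updateDescriptorSetWithTemplateKHR( set, tmpl, &bufferInfo );
  device.destroyDescriptorUpdateTemplateKHR( tmpl );
}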
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(
- const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(
- const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_display_control ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ typename ResultValueType<void>::type
+ displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayPowerInfoEXT & displayPowerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures(
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayEventInfoEXT & displayEventInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayEventInfoEXT & displayEventInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ uint64_t * pCounterValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
- getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
+ getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
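Illustrative usage sketch for the VK_EXT_display_control swapchain-counter wrapper above (not part of this patch). The helper name and handles are hypothetical; the swapchain's surface is assumed to expose the vblank counter via VK_EXT_display_surface_counter.

uint64_t readVblankCounter( vk::Device device, vk::SwapchainKHR swapchain )
{
  // With exceptions enabled, ResultValueType<uint64_t>::type is plain uint64_t.
  return device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );
}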
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ //=== VK_GOOGLE_display_timing ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
+ getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pPresentationTimingCount,
+ VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
- getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PastPresentationTimingGOOGLEAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
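Illustrative usage sketch for the VK_GOOGLE_display_timing wrappers above (not part of this patch). Helper name and handles are hypothetical; assumes <vector> and exceptions enabled.

void logDisplayTiming( vk::Device device, vk::SwapchainKHR swapchain )
{
  // refreshDuration is reported in nanoseconds.
  vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
  // Each entry compares the desired and actual presentation times of a past present.
  std::vector<vk::PastPresentationTimingGOOGLE> timings =
    device.getPastPresentationTimingGOOGLE( swapchain );
  (void)refresh;
  (void)timings;
}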
+ //=== VK_EXT_hdr_metadata ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setHdrMetadataEXT( uint32_t swapchainCount,
+ const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+ const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
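Illustrative usage sketch for the VK_EXT_hdr_metadata ArrayProxy overload above (not part of this patch). The helper name and handles are hypothetical, the mastering values are placeholders rather than recommendations, and the swapchain and metadata proxies must have equal lengths (here both are single-element).

void setHdr10Metadata( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::HdrMetadataEXT metadata;
  metadata.displayPrimaryRed         = vk::XYColorEXT( 0.708f, 0.292f );
  metadata.displayPrimaryGreen       = vk::XYColorEXT( 0.170f, 0.797f );
  metadata.displayPrimaryBlue        = vk::XYColorEXT( 0.131f, 0.046f );
  metadata.whitePoint                = vk::XYColorEXT( 0.3127f, 0.3290f );
  metadata.maxLuminance              = 1000.0f;
  metadata.minLuminance              = 0.001f;
  metadata.maxContentLightLevel      = 1000.0f;
  metadata.maxFrameAverageLightLevel = 400.0f;
  device.setHdrMetadataEXT( swapchain, metadata );   // single-element ArrayProxy overload
}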
+ //=== VK_KHR_create_renderpass2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
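Illustrative usage sketch for the VK_KHR_create_renderpass2 Unique wrapper above (not part of this patch). The helper name and handle are hypothetical; assumes exceptions enabled and smart handles not disabled.

vk::UniqueRenderPass createMinimalRenderPass2( vk::Device device )
{
  // A single graphics subpass with no attachments, returned as an RAII UniqueHandle.
  vk::SubpassDescription2 subpass;
  subpass.pipelineBindPoint = vk::PipelineBindPoint::eGraphics;
  vk::RenderPassCreateInfo2 createInfo;
  createInfo.subpassCount = 1;
  createInfo.pSubpasses   = &subpass;
  return device.createRenderPass2KHRUnique( createInfo );
}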
+
+ //=== VK_KHR_shared_presentable_image ===
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const char * pName,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const std::string & name,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
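Illustrative usage sketch for the VK_KHR_shared_presentable_image wrapper above (not part of this patch); the helper name and handles are hypothetical.

vk::Result pollSharedPresentableImage( vk::Device device, vk::SwapchainKHR swapchain )
{
  // Even in enhanced mode this returns a Result, since eSuboptimalKHR is a success code
  // callers typically want to inspect; error codes still throw.
  return device.getSwapchainStatusKHR( swapchain );
}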
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_fence_win32 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
- getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
- VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
- getQueue2( const DeviceQueueInfo2 & queueInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_external_fence_fd ===
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -61497,86 +61485,81 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_KHR_performance_query ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
- getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ void
+ releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
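Illustrative usage sketch for the VK_KHR_performance_query lock wrappers above (not part of this patch). The helper name is hypothetical and UINT64_MAX (from <cstdint>) is used as an indefinite timeout for the example.

void withProfilingLock( vk::Device device )
{
  // Hold the profiling lock while recording command buffers that use performance query pools.
  vk::AcquireProfilingLockInfoKHR lockInfo( {}, UINT64_MAX );
  device.acquireProfilingLockKHR( lockInfo );
  // ... record and submit profiled work ...
  device.releaseProfilingLockKHR();
}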
+ //=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(
- const GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(
- const GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
- getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
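Illustrative usage sketch for the VK_EXT_debug_utils object-naming wrapper above (not part of this patch). The helper name and handles are hypothetical; the extension must be enabled on the instance.

void nameBuffer( vk::Device device, vk::Buffer buffer, const char * name )
{
  vk::DebugUtilsObjectNameInfoEXT nameInfo(
    vk::ObjectType::eBuffer,
    uint64_t( static_cast<VkBuffer>( buffer ) ),   // raw handle value
    name );
  device.setDebugUtilsObjectNameEXT( nameInfo );
}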
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_ANDROID_external_memory_android_hardware_buffer ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(
- const ImageMemoryRequirementsInfo2 & info,
+ VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID(
+ const struct AHardwareBuffer * buffer,
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
+ getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2(
- const ImageMemoryRequirementsInfo2 & info,
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID(
+ const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
+ struct AHardwareBuffer ** pBuffer,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer *>::type
+ getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
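Illustrative usage sketch for the VK_ANDROID_external_memory_android_hardware_buffer structure-chain overload above (not part of this patch). The helper name and handles are hypothetical; this only compiles on Android builds where AHardwareBuffer and VK_USE_PLATFORM_ANDROID_KHR are available.

vk::AndroidHardwareBufferFormatPropertiesANDROID
  queryAhbFormat( vk::Device device, const AHardwareBuffer & hardwareBuffer )
{
  // Chain the format properties onto the base properties query.
  auto chain = device.getAndroidHardwareBufferPropertiesANDROID<
    vk::AndroidHardwareBufferPropertiesANDROID,
    vk::AndroidHardwareBufferFormatPropertiesANDROID >( hardwareBuffer );
  return chain.get<vk::AndroidHardwareBufferFormatPropertiesANDROID>();
}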
+ //=== VK_KHR_get_memory_requirements2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
@@ -61595,50 +61578,19 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirementsAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(
+ const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(
+ const BufferMemoryRequirementsInfo2 & info,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirements2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2(
- const ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -61665,443 +61617,490 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_acceleration_structure ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
+ createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
+ createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout(
- VULKAN_HPP_NAMESPACE::Image image,
- const ImageSubresource & subresource,
+ void destroyAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
- getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t
- getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t
- getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Result buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
- struct AHardwareBuffer ** pBuffer,
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer *>::type
- getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ VULKAN_HPP_NODISCARD Result
+ copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
- getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result
+ copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
- getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result
+ copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+ VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
- getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ ArrayProxy<T> const & data,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T,
+ typename Allocator = std::allocator<T>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
+ writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type writeAccelerationStructuresPropertyKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ DeviceAddress getAccelerationStructureAddressKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
- getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ DeviceAddress getAccelerationStructureAddressKHR(
+ const AccelerationStructureDeviceAddressInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void getAccelerationStructureCompatibilityKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
- getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
+ void getAccelerationStructureBuildSizesKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
+ const uint32_t * pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
- getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
+ getAccelerationStructureBuildSizesKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
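[Illustrative sketch, not part of the header or this commit: the enhanced-mode size query declared above, assuming #include <vulkan/vulkan.hpp>, a valid vk::Device device, the default dispatcher, exceptions enabled, and a build-geometry info filled in by the caller.]

    vk::AccelerationStructureBuildGeometryInfoKHR buildInfo{};  // assumed: geometry set up by the caller
    uint32_t                                      maxPrimitiveCount = 1;
    // The enhanced overload returns the size structure by value instead of filling a pointer.
    vk::AccelerationStructureBuildSizesInfoKHR sizeInfo =
      device.getAccelerationStructureBuildSizesKHR( vk::AccelerationStructureBuildTypeKHR::eDevice,
                                                    buildInfo,
                                                    maxPrimitiveCount );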
+
+ //=== VK_KHR_sampler_ycbcr_conversion ===
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(
+ const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
- getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroySamplerYcbcrConversionKHR(
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
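[Illustrative sketch, not part of the header: the unique-handle creator declared above, assuming a valid vk::Device device, exceptions enabled, and a create info filled in by the caller; the returned handle destroys the conversion automatically when it goes out of scope.]

    vk::SamplerYcbcrConversionCreateInfo createInfo{};  // assumed: format, model and ranges set by the caller
    auto conversion = device.createSamplerYcbcrConversionKHRUnique( createInfo );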
+
+ //=== VK_KHR_bind_memory2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
- getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ VULKAN_HPP_NODISCARD Result
+ bindBufferMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pPresentationTimingCount,
- VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
+ VULKAN_HPP_NODISCARD Result
+ bindImageMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
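[Illustrative sketch, not part of the header: the ArrayProxy overload above binding two buffers in one call, assuming a valid vk::Device device and bind infos filled in elsewhere.]

    std::array<vk::BindBufferMemoryInfo, 2> bindInfos{};  // assumed: buffer, memory and offset set by the caller
    device.bindBufferMemory2KHR( bindInfos );             // throws on failure when exceptions are enabled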
+
+ //=== VK_EXT_image_drm_format_modifier ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(
+ VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PastPresentationTimingGOOGLEAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
+ getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
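[Illustrative sketch, not part of the header: the value-returning query above, assuming a valid vk::Device device and a vk::Image image created with DRM-format-modifier tiling.]

    vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
    uint64_t modifier = props.drmFormatModifier;  // the modifier actually selected for the image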
+ //=== VK_EXT_validation_cache ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
+ VULKAN_HPP_NODISCARD Result
+ createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(
+ const ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
+ createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyValidationCacheEXT(
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
- getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator = std::allocator<uint8_t>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint8_tAllocator = std::allocator<uint8_t>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B = Uint8_tAllocator,
typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Uint8_tAllocator & uint8_tAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
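[Illustrative sketch, not part of the header: tying the validation-cache entry points above together, assuming a valid vk::Device device, the default dispatcher and exceptions enabled.]

    vk::ValidationCacheEXT cache = device.createValidationCacheEXT( vk::ValidationCacheCreateInfoEXT{} );
    std::vector<uint8_t>   blob  = device.getValidationCacheDataEXT( cache );  // serialized cache contents
    device.destroyValidationCacheEXT( cache );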
+ //=== VK_NV_ray_tracing ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pInternalRepresentationCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+ VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableInternalRepresentationKHRAllocator =
- std::allocator<PipelineExecutableInternalRepresentationKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PipelineExecutableInternalRepresentationKHRAllocator =
- std::allocator<PipelineExecutableInternalRepresentationKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutableInternalRepresentationKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- getPipelineExecutableInternalRepresentationsKHR(
- const PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(
+ const AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
+ createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
- uint32_t * pExecutableCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutablePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- getPipelineExecutablePropertiesKHR(
- const PipelineInfoKHR & pipelineInfo,
- PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pStatisticCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
+ void destroyAccelerationStructureNV(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutableStatisticKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- getPipelineExecutableStatisticsKHR(
- const PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- uint64_t * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD uint64_t
- getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void * pData,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getAccelerationStructureMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- ArrayProxy<T> const & data,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T,
- typename Allocator = std::allocator<T>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<T, Allocator>>
- getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<T>
- getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
+ const AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
+ const AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
+ VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(
+ uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T,
- typename Allocator = std::allocator<T>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
- getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
- getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T,
- typename Allocator = std::allocator<T>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
- getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
- getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename PipelineAllocator = std::allocator<Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesNVUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
+ typename B = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesNVUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV(
@@ -62137,49 +62136,115 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t group,
- VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ ArrayProxy<T> const & data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T,
+ typename Allocator = std::allocator<T>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
+ getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
+ getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t shader,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t shader,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
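[Illustrative sketch, not part of the header: the single-value overload above reading the 8-byte opaque NV handle, assuming a valid vk::Device device and vk::AccelerationStructureNV accelerationStructure.]

    uint64_t handle = device.getAccelerationStructureHandleNV<uint64_t>( accelerationStructure );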
+
+ //=== VK_KHR_maintenance3 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
- getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(
+ const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(
+ const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
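[Illustrative sketch, not part of the header: the value-returning support query above, assuming a valid vk::Device device and a layout create info filled in by the caller.]

    vk::DescriptorSetLayoutCreateInfo layoutCreateInfo{};  // assumed: bindings set by the caller
    vk::DescriptorSetLayoutSupport    support = device.getDescriptorSetLayoutSupportKHR( layoutCreateInfo );
    if ( !support.supported )
    {
      // fall back to a smaller layout
    }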
+ //=== VK_EXT_external_memory_host ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
- getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
+ getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
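[Illustrative sketch, not part of the header: the host-pointer query above, assuming a valid vk::Device device and hostPtr, a suitably aligned host allocation.]

    vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
      vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
    // props.memoryTypeBits lists the memory types that can import hostPtr.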
+ //=== VK_EXT_calibrated_timestamps ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- uint64_t * pValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(
+ uint32_t timestampCount,
+ const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
+ uint64_t * pTimestamps,
+ uint64_t * pMaxDeviation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
- getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ ArrayProxy<uint64_t> const & timestamps,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint64_tAllocator = std::allocator<uint64_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint64_tAllocator = std::allocator<uint64_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Uint64_tAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Uint64_tAllocator & uint64_tAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
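[Illustrative sketch, not part of the header: the vector overload above, which returns the timestamps together with the maximum deviation, assuming a valid vk::Device device, exceptions enabled, and that both time domains are supported.]

    std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
      vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
      vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic )
    };
    std::pair<std::vector<uint64_t>, uint64_t> result = device.getCalibratedTimestampsEXT( infos );
    uint64_t deviceTicks  = result.first[0];   // sampled in the same order as infos
    uint64_t maxDeviation = result.second;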
+ //=== VK_KHR_timeline_semaphore ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR(
VULKAN_HPP_NAMESPACE::Semaphore semaphore,
@@ -62194,499 +62259,478 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
- int * pFd,
+ waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
- getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
- getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
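[Illustrative sketch, not part of the header: the timeline-semaphore calls above, assuming a valid vk::Device device, a timeline vk::Semaphore semaphore, and exceptions enabled.]

    uint64_t waitValue = 1;
    vk::SemaphoreWaitInfo waitInfo{};
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &semaphore;
    waitInfo.pValues        = &waitValue;
    vk::Result result = device.waitSemaphoresKHR( waitInfo, UINT64_MAX );  // eSuccess or eTimeout
    device.signalSemaphoreKHR( vk::SemaphoreSignalInfo( semaphore, 2 ) );  // bump the timeline to 2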
+
+ //=== VK_INTEL_performance_query ===
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
+ VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(
+ const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
- getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- size_t * pInfoSize,
- void * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint8_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- uint64_t * pCounterValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
- getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
+ acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
+ acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pSwapchainImageCount,
- VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename ImageAllocator = std::allocator<Image>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = ImageAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- ImageAllocator & imageAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- size_t * pDataSize,
- void * pData,
+ VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint8_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
+ getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
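[Illustrative sketch, not part of the header: the value-returning parameter query above, assuming a valid vk::Device device on which the INTEL performance API has already been initialized.]

    vk::PerformanceValueINTEL value =
      device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );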
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMD_display_native_hdr ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t * pVideoSessionMemoryRequirementsCount,
- VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = VideoGetMemoryPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
+ VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_buffer_device_address ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress
+ getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ DeviceAddress
+ getBufferAddressEXT( const BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_EXT_full_screen_exclusive ===
+
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
+ VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
+ VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(
- const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_buffer_device_address ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(
- uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress
+ getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ DeviceAddress
+ getBufferAddressKHR( const BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
- void ** ppData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void *>::type
- mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_host_query_reset ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_KHR_deferred_host_operations ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
+ createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayEventInfoEXT & displayEventInfo,
+ void destroyDeferredOperationKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayEventInfoEXT & displayEventInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# else
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result
+ getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result
+ deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_KHR_pipeline_executable_properties ===
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
+ uint32_t * pExecutableCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PipelineExecutablePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ getPipelineExecutablePropertiesKHR(
+ const PipelineInfoKHR & pipelineInfo,
+ PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(
+ const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pStatisticCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PipelineExecutableStatisticKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ getPipelineExecutableStatisticsKHR(
+ const PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(
+ const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pInternalRepresentationCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineExecutableInternalRepresentationKHRAllocator =
+ std::allocator<PipelineExecutableInternalRepresentationKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
+ PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
+ Dispatch const & d
+ VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename PipelineExecutableInternalRepresentationKHRAllocator =
+ std::allocator<PipelineExecutableInternalRepresentationKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PipelineExecutableInternalRepresentationKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
+ PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ getPipelineExecutableInternalRepresentationsKHR(
+ const PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_device_generated_commands ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getGeneratedCommandsMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(
+ const GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(
+ const GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
+ createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
+ createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroyIndirectCommandsLayoutNV(
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_EXT_private_data ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- setEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT(
+ const PrivateDataSlotCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
+ createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( uint32_t swapchainCount,
- const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
- const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void destroyPrivateDataSlotEXT(
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
- VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -62707,161 +62751,223 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ uint64_t * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- signalSemaphore( const SemaphoreSignalInfo & signalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD uint64_t
+ getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_ray_tracing_pipeline ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineAllocator = std::allocator<Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesKHRUnique(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
+ typename B = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesKHRUnique(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ ArrayProxy<T> const & data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T,
+ typename Allocator = std::allocator<T>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
+ getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
+ getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ ArrayProxy<T> const & data,
+ Dispatch const & d
+ VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T,
+ typename Allocator = std::allocator<T>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
+ getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ Dispatch const & d
+ VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
+ getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ Dispatch const & d
+ VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t group,
+ VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_external_memory ===
 
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+ VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VideoSessionParametersUpdateInfoKHR & updateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
+ getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitForFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
+ getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
 
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_external_semaphore ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(
+ const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- void * pData,
- size_t stride,
+ VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- ArrayProxy<T> const & data,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T,
- typename Allocator = std::allocator<T>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
- writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
+ getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
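A minimal usage sketch of the two VK_FUCHSIA_external_semaphore entry points declared above, assuming enhanced mode with exceptions enabled, VK_USE_PLATFORM_FUCHSIA defined, and a semaphore created with Zircon-event export support; shareSemaphore, device and semaphore are placeholder names.

#include <vulkan/vulkan.hpp>

// Sketch only: assumes the VK_FUCHSIA_external_semaphore extension is enabled on "device".
void shareSemaphore( vk::Device device, vk::Semaphore semaphore )
{
  // Export the semaphore payload as a Zircon event handle.
  vk::SemaphoreGetZirconHandleInfoFUCHSIA getInfo;
  getInfo.semaphore  = semaphore;
  getInfo.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA;
  zx_handle_t handle = device.getSemaphoreZirconHandleFUCHSIA( getInfo );

  // Import the handle into a semaphore; the implementation takes ownership on success.
  vk::ImportSemaphoreZirconHandleInfoFUCHSIA importInfo;
  importInfo.semaphore    = semaphore;
  importInfo.handleType   = vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA;
  importInfo.zirconHandle = handle;
  device.importSemaphoreZirconHandleFUCHSIA( importInfo );
}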
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
{
@@ -67626,34 +67732,85 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_VERSION_1_0 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
+ getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
+ getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# else
+ getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
+ getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
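A sketch of the enhanced-mode getImageFormatProperties overload above, assuming exceptions are enabled; physicalDevice, format and usage are placeholder inputs, and an unsupported combination surfaces as a thrown vk::SystemError.

#include <vulkan/vulkan.hpp>

// Sketch only: true when "format" supports "usage" as an optimally tiled 2D image.
bool isSupported( vk::PhysicalDevice physicalDevice, vk::Format format, vk::ImageUsageFlags usage )
{
  try
  {
    vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
      format, vk::ImageType::e2D, vk::ImageTiling::eOptimal, usage );
    return props.maxExtent.width > 0;
  }
  catch ( vk::SystemError const & )  // e.g. vk::Result::eErrorFormatNotSupported
  {
    return false;
  }
}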
-#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireXlibDisplayEXT( Display * dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireXlibDisplayEXT( Display & dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
+ getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void
+ getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = QueueFamilyPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
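A sketch built on the vector-returning getQueueFamilyProperties overload above; findGraphicsQueueFamily and physicalDevice are placeholder names.

#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <optional>

// Sketch only: index of the first queue family that supports graphics work.
std::optional<uint32_t> findGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
{
  std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
  {
    if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
      return i;
  }
  return std::nullopt;
}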
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
+ getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
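A sketch using the structure-returning getMemoryProperties overload above for the usual memory-type selection loop; typeBits would typically come from a vk::MemoryRequirements query, and the names are placeholders.

#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <stdexcept>

// Sketch only: first memory type allowed by "typeBits" that has all "required" property flags.
uint32_t findMemoryType( vk::PhysicalDevice physicalDevice, uint32_t typeBits, vk::MemoryPropertyFlags required )
{
  vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
  for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  {
    if ( ( typeBits & ( 1u << i ) ) &&
         ( ( memProps.memoryTypes[i].propertyFlags & required ) == required ) )
      return i;
  }
  throw std::runtime_error( "no suitable memory type" );
}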
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
@@ -67678,33 +67835,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
- createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayModeCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
- createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayModeCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties(
const char * pLayerName,
uint32_t * pPropertyCount,
@@ -67747,298 +67877,163 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- uint32_t * pCounterCount,
- VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
- Allocator const & vectorAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
- typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
- typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B1 = PerformanceCounterKHRAllocator,
- typename B2 = PerformanceCounterDescriptionKHRAllocator,
- typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
- std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
- PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <
- typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayModeProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SparseImageFormatPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
- getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayModePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
- getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
- getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
+ getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
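A sketch of the StructureChain form of getFeatures2 above, assuming a Vulkan 1.2 capable implementation; hasTimelineSemaphores is a placeholder name.

#include <vulkan/vulkan.hpp>

// Sketch only: query core features together with the Vulkan 1.2 feature struct in one chain.
bool hasTimelineSemaphores( vk::PhysicalDevice physicalDevice )
{
  auto chain =
    physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
  return chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore == VK_TRUE;
}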
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
- getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+ getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(
- uint32_t planeIndex,
- uint32_t * pDisplayCount,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- DisplayKHRAllocator & displayKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(
- uint32_t * pTimeDomainCount,
- VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = TimeDomainEXTAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
+ getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
- getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CooperativeMatrixPropertiesNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
- getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
- IDirectFB * dfb,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
- IDirectFB & dfb,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
- getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <
- typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPlaneProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
- getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = QueueFamilyProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename StructureChain,
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
+ getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename StructureChain,
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = StructureChainAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
+ getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
- getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPlanePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
- getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDisplayProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
- getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
- getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+ getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDisplayPropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties2(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SparseImageFormatProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -68054,18 +68049,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(
- const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
@@ -68078,42 +68061,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(
- const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
- getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getExternalSemaphoreProperties(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
@@ -68125,256 +68072,382 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(
- const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
- getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
+ getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
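A sketch of the enhanced-mode getSurfaceSupportKHR overload above, assuming exceptions are enabled; canPresent and its parameters are placeholders.

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Sketch only: can queue family "queueFamilyIndex" present to "surface"?
bool canPresent( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, vk::SurfaceKHR surface )
{
  return physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) == VK_TRUE;
}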
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
+ getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = SurfaceFormatKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
- getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PresentModeKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ PresentModeKHRAllocator & presentModeKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
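A sketch combining the getSurfaceFormatsKHR and getSurfacePresentModesKHR overloads above to pick swapchain settings; the sRGB format and mailbox preference are illustrative choices, with FIFO as the spec-guaranteed fallback.

#include <vulkan/vulkan.hpp>
#include <algorithm>

// Sketch only: choose a surface format and present mode for swapchain creation.
void chooseSwapchainSettings( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  vk::SurfaceFormatKHR chosenFormat = formats.front();  // at least one format is guaranteed
  for ( vk::SurfaceFormatKHR const & f : formats )
    if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
      chosenFormat = f;

  std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
  vk::PresentModeKHR chosenMode =
    ( std::find( modes.begin(), modes.end(), vk::PresentModeKHR::eMailbox ) != modes.end() )
      ? vk::PresentModeKHR::eMailbox
      : vk::PresentModeKHR::eFifo;  // FIFO is always available

  static_cast<void>( chosenFormat );
  static_cast<void>( chosenMode );
}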
+ //=== VK_KHR_swapchain ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pRectCount,
+ VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+ getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Rect2DAllocator = std::allocator<Rect2D>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Rect2DAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+ getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Rect2DAllocator & rect2DAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_display ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NODISCARD Result
+ getDisplayPropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayPropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(
- uint32_t * pFragmentShadingRateCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+ VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <
- typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceFragmentShadingRateKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- getFragmentShadingRatesKHR(
- PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayPlanePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(
+ uint32_t planeIndex,
+ uint32_t * pDisplayCount,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
- getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+ getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+ getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ DisplayKHRAllocator & displayKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayModePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
+ createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayModeCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
+ createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayModeCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+ uint32_t planeIndex,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
+ getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+ uint32_t planeIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
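A minimal illustrative sketch (not taken from the patch) of how the enhanced-mode VK_KHR_display overloads declared above are typically chained together, assuming <vulkan/vulkan.hpp> is available, the instance enabled VK_KHR_display, exceptions are enabled (the default), and the default dispatcher is initialized:

// Illustrative sketch only; assumes an instance with VK_KHR_display enabled,
// the default (exception-throwing) configuration, and the default dispatcher.
#include <vulkan/vulkan.hpp>

void inspectDisplayPlanes( vk::PhysicalDevice physicalDevice )
{
  // Enhanced-mode overloads return std::vector<...> and throw vk::SystemError on failure.
  std::vector<vk::DisplayPlanePropertiesKHR> planes = physicalDevice.getDisplayPlanePropertiesKHR();
  for ( uint32_t planeIndex = 0; planeIndex < planes.size(); ++planeIndex )
  {
    // Displays that can be placed on this plane.
    std::vector<vk::DisplayKHR> displays = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex );
    for ( vk::DisplayKHR display : displays )
    {
      // Modes supported by the display, then the plane capabilities for the first mode.
      auto modes = physicalDevice.getDisplayModePropertiesKHR( display );
      if ( !modes.empty() )
      {
        vk::DisplayPlaneCapabilitiesKHR caps =
          physicalDevice.getDisplayPlaneCapabilitiesKHR( modes.front().displayMode, planeIndex );
        (void)caps;
      }
    }
  }
}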
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+ Display * dpy,
+ VisualID visualID,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+ Display & dpy,
+ VisualID visualID,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t * connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t & connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
+ struct wl_display * display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
+ struct wl_display & display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_win32_surface ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
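A hedged sketch of the per-platform presentation-support queries declared above; the #if mirrors the header's own platform guards, and the queue family index is assumed to be valid for the device:

// Sketch only; on non-Win32 platforms the corresponding getXlib/Xcb/Wayland
// PresentationSupportKHR overloads additionally take the native connection/display handle.
#include <vulkan/vulkan.hpp>

bool queueFamilyCanPresent( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  return physicalDevice.getWin32PresentationSupportKHR( queueFamilyIndex ) == VK_TRUE;
#else
  (void)physicalDevice;
  (void)queueFamilyIndex;
  return false;
#endif
}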
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pRectCount,
- VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+ getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
+ getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Rect2DAllocator = std::allocator<Rect2D>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Rect2DAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Rect2DAllocator & rect2DAllocator,
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
+ uint32_t * pVideoFormatPropertyCount,
+ VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = VideoFormatPropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+ VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_NV_external_memory_capabilities ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
- getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
+ getExternalImageFormatPropertiesNV(
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
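A sketch of the VK_NV_external_memory_capabilities query declared above; the format and usage below are arbitrary example values, and the omitted flags/handle-type arguments rely on the defaulted parameters:

// Hedged sketch; throws vk::SystemError if the requested combination is unsupported.
#include <vulkan/vulkan.hpp>

vk::ExternalImageFormatPropertiesNV queryExternalImageFormatNV( vk::PhysicalDevice physicalDevice )
{
  return physicalDevice.getExternalImageFormatPropertiesNV( vk::Format::eR8G8B8A8Unorm,
                                                            vk::ImageType::e2D,
                                                            vk::ImageTiling::eOptimal,
                                                            vk::ImageUsageFlagBits::eSampledImage );
}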
+ //=== VK_KHR_get_physical_device_properties2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
+ getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
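A sketch of the StructureChain overload of getFeatures2KHR declared above, assuming the instance enabled VK_KHR_get_physical_device_properties2; the chained 16-bit-storage feature struct is just an example of an extending structure:

// Sketch only; any structure that can extend PhysicalDeviceFeatures2 may be chained here.
#include <vulkan/vulkan.hpp>

bool supports16BitStorage( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
                                              vk::PhysicalDevice16BitStorageFeatures>();
  return chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess == VK_TRUE;
}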
@@ -68390,68 +68463,35 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
- uint32_t * pNumPasses,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
- const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
+ getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
- getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = QueueFamilyPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
- getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = QueueFamilyProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename StructureChain,
- typename StructureChainAllocator = std::allocator<StructureChain>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
- getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename StructureChain,
- typename StructureChainAllocator = std::allocator<StructureChain>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = StructureChainAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
- getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -68487,77 +68527,17 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
- struct _screen_window * window,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
- struct _screen_window & window,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void
+ getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageFormatPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageFormatProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+ getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -68583,32 +68563,85 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_external_memory_capabilities ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(
- uint32_t * pCombinationCount,
- VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
+ void getExternalBufferPropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <
- typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- getSupportedFramebufferMixedSamplesCombinationsNV(
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = FramebufferMixedSamplesCombinationNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- getSupportedFramebufferMixedSamplesCombinationsNV(
- FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(
+ const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_external_semaphore_capabilities ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalSemaphorePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(
+ const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_direct_mode_display ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type
+ releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+ //=== VK_EXT_acquire_xlib_display ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ acquireXlibDisplayEXT( Display * dpy,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ acquireXlibDisplayEXT( Display & dpy,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result
+ getRandROutputDisplayEXT( Display * dpy,
+ RROutput rrOutput,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(
+ Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ getRandROutputDisplayEXTUnique( Display & dpy,
+ RROutput rrOutput,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+ //=== VK_EXT_display_surface_counter ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT(
VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
@@ -68622,6 +68655,88 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_external_fence_capabilities ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getExternalFencePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(
+ const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_performance_query ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ uint32_t * pCounterCount,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = Allocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
+ Allocator const & vectorAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
+ typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
+ typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PerformanceCounterKHRAllocator,
+ typename B2 = PerformanceCounterDescriptionKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
+ std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
+ PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyPerformanceQueryPassesKHR(
+ const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+ uint32_t * pNumPasses,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
+ const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
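A sketch of the VK_KHR_performance_query enumeration declared above, assuming the default (exception-throwing) configuration; queue family 0 is an arbitrary example index:

// Sketch only; the pair holds one description per enumerated counter.
#include <vulkan/vulkan.hpp>

uint32_t countPerformanceCounters( vk::PhysicalDevice physicalDevice )
{
  auto [counters, descriptions] =
    physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( 0 );
  (void)descriptions;
  return static_cast<uint32_t>( counters.size() );
}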
+ //=== VK_KHR_get_surface_capabilities2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
@@ -68640,18 +68755,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
- getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
uint32_t * pSurfaceFormatCount,
@@ -68673,88 +68776,154 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_get_display_properties2 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getDisplayProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SurfaceFormatKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PresentModeKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayPlaneProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PresentModeKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = DisplayModeProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(
+ const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
- getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
+ getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_sample_locations ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_calibrated_timestamps ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(
+ uint32_t * pTimeDomainCount,
+ VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = TimeDomainEXTAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
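A sketch of the VK_EXT_calibrated_timestamps query declared above; it merely checks for a monotonic host clock domain and is illustrative only:

// Sketch only; assumes the device advertises VK_EXT_calibrated_timestamps.
#include <vulkan/vulkan.hpp>
#include <algorithm>

bool hasClockMonotonicDomain( vk::PhysicalDevice physicalDevice )
{
  std::vector<vk::TimeDomainEXT> domains = physicalDevice.getCalibrateableTimeDomainsEXT();
  return std::find( domains.begin(), domains.end(), vk::TimeDomainEXT::eClockMonotonic ) != domains.end();
}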
+ //=== VK_KHR_fragment_shading_rate ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(
+ uint32_t * pFragmentShadingRateCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <
+ typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PhysicalDeviceFragmentShadingRateKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ getFragmentShadingRatesKHR(
+ PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_tooling_info ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
getToolPropertiesEXT( uint32_t * pToolCount,
@@ -68777,129 +68946,99 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
- getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ //=== VK_NV_cooperative_matrix ===
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
- uint32_t * pVideoFormatPropertyCount,
- VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+ VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = VideoFormatPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
+ typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = CooperativeMatrixPropertiesNVAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value,
+ int>::type = 0>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
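Illustrative aside, not part of the patch: a minimal sketch of the VK_NV_cooperative_matrix query declared above, under the same assumptions (default dispatcher, exceptions, an assumed physicalDevice).

    // Sketch only: list the matrix shapes and component types the driver accepts.
    auto cooperativeMatrixProperties = physicalDevice.getCooperativeMatrixPropertiesNV();
    for ( vk::CooperativeMatrixPropertiesNV const & p : cooperativeMatrixProperties )
    {
      // p.MSize, p.NSize, p.KSize together with p.AType/BType/CType/DType and p.scope
      // describe one supported cooperative matrix configuration
    }
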
+ //=== VK_NV_coverage_reduction_mode ===
-#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
- struct wl_display * display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
- struct wl_display & display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+ VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(
+ uint32_t * pCombinationCount,
+ VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <
+ typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ getSupportedFramebufferMixedSamplesCombinationsNV(
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <
+ typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = FramebufferMixedSamplesCombinationNVAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ getSupportedFramebufferMixedSamplesCombinationsNV(
+ FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ //=== VK_EXT_full_screen_exclusive ===
-#if defined( VK_USE_PLATFORM_XCB_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t * connection,
- xcb_visualid_t visual_id,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t & connection,
- xcb_visualid_t visual_id,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PresentModeKHRAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ PresentModeKHRAllocator & presentModeKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
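Illustrative aside, not part of the patch: the Win32-only VK_EXT_full_screen_exclusive query above can be driven as follows, assuming an existing vk::SurfaceKHR named surface and the usual defaults.

    // Sketch only (guarded by VK_USE_PLATFORM_WIN32_KHR in real code).
    vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
    std::vector<vk::PresentModeKHR> presentModes =
        physicalDevice.getSurfacePresentModes2EXT( surfaceInfo );
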
-#if defined( VK_USE_PLATFORM_XLIB_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display * dpy,
- VisualID visualID,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display & dpy,
- VisualID visualID,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_acquire_winrt_display ===
-#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- getRandROutputDisplayEXT( Display * dpy,
- RROutput rrOutput,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(
- Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- getRandROutputDisplayEXTUnique( Display & dpy,
- RROutput rrOutput,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
getWinrtDisplayNV( uint32_t deviceRelativeId,
@@ -68919,16 +69058,39 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
+ Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+ IDirectFB * dfb,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+ IDirectFB & dfb,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+ struct _screen_window * window,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+ struct _screen_window & window,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
{
@@ -80360,6 +80522,92 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
};
+ struct PhysicalDeviceGlobalPriorityQueryFeaturesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+ StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
+ VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {} ) VULKAN_HPP_NOEXCEPT
+ : globalPriorityQuery( globalPriorityQuery_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
+ PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGlobalPriorityQueryFeaturesEXT( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
+ *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesEXT const *>( &rhs ) )
+ {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
+ operator=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
+ operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceGlobalPriorityQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
+ setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
+ {
+ globalPriorityQuery = globalPriorityQuery_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery );
+ }
+
+ bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};
+ };
+ static_assert( sizeof( PhysicalDeviceGlobalPriorityQueryFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceGlobalPriorityQueryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT>
+ {
+ using Type = PhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+ };
+
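Illustrative aside, not part of the patch: a feature struct like the one above is normally read back through a structure chain. A minimal sketch, assuming an already-obtained physicalDevice and a driver exposing VK_EXT_global_priority_query:

    // Sketch only: chain the feature struct behind PhysicalDeviceFeatures2 and read it back.
    auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                    vk::PhysicalDeviceGlobalPriorityQueryFeaturesEXT>();
    bool canQueryGlobalPriority =
        featureChain.get<vk::PhysicalDeviceGlobalPriorityQueryFeaturesEXT>().globalPriorityQuery;
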
struct PhysicalDeviceGroupProperties
{
static const bool allowDuplicate = false;
@@ -86184,6 +86432,96 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+ StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+ VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+ *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
+ {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+ operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+ operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this =
+ *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>(
+ &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow(
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+ ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
+ }
+
+ bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType =
+ StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
+ };
+ static_assert( sizeof( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+ {
+ using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+ };
+
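Illustrative aside, not part of the patch: enabling a feature struct like this one follows the standard pNext pattern at device creation. A minimal sketch; the queue create infos and the VK_KHR_shader_subgroup_uniform_control_flow extension name are assumed to be filled in elsewhere.

    // Sketch only: request the feature when creating the device.
    vk::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR subgroupUcfFeatures;
    subgroupUcfFeatures.shaderSubgroupUniformControlFlow = VK_TRUE;

    vk::DeviceCreateInfo deviceCreateInfo;          // queue create infos etc. omitted in this sketch
    deviceCreateInfo.pNext = &subgroupUcfFeatures;  // the extension must also be enabled by name
    vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
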
struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR
{
static const bool allowDuplicate = false;
@@ -93336,7 +93674,7 @@ namespace VULKAN_HPP_NAMESPACE
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueueFamilyCheckpointProperties2Nv;
+ StructureType::eQueueFamilyCheckpointProperties2NV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(
@@ -93388,7 +93726,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2Nv;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {};
};
@@ -93398,7 +93736,7 @@ namespace VULKAN_HPP_NAMESPACE
"struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2Nv>
+ struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
{
using Type = QueueFamilyCheckpointProperties2NV;
};
@@ -93473,6 +93811,118 @@ namespace VULKAN_HPP_NAMESPACE
using Type = QueueFamilyCheckpointPropertiesNV;
};
+ struct QueueFamilyGlobalPriorityPropertiesEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+ StructureType::eQueueFamilyGlobalPriorityPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT(
+ uint32_t priorityCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT> const &
+ priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow } } ) VULKAN_HPP_NOEXCEPT
+ : priorityCount( priorityCount_ )
+ , priorities( priorities_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT( QueueFamilyGlobalPriorityPropertiesEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyGlobalPriorityPropertiesEXT( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : QueueFamilyGlobalPriorityPropertiesEXT(
+ *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesEXT const *>( &rhs ) )
+ {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT &
+ operator=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyGlobalPriorityPropertiesEXT &
+ operator=( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ QueueFamilyGlobalPriorityPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ QueueFamilyGlobalPriorityPropertiesEXT & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ priorityCount = priorityCount_;
+ return *this;
+ }
+
+ QueueFamilyGlobalPriorityPropertiesEXT & setPriorities(
+ std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT> priorities_ )
+ VULKAN_HPP_NOEXCEPT
+ {
+ priorities = priorities_;
+ return *this;
+ }
+
+ operator VkQueueFamilyGlobalPriorityPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesEXT *>( this );
+ }
+
+ operator VkQueueFamilyGlobalPriorityPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( QueueFamilyGlobalPriorityPropertiesEXT const & ) const = default;
+#else
+ bool operator==( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) &&
+ ( priorities == rhs.priorities );
+ }
+
+ bool operator!=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesEXT;
+ void * pNext = {};
+ uint32_t priorityCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT>
+ priorities = {};
+ };
+ static_assert( sizeof( QueueFamilyGlobalPriorityPropertiesEXT ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesEXT ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueueFamilyGlobalPriorityPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesEXT>
+ {
+ using Type = QueueFamilyGlobalPriorityPropertiesEXT;
+ };
+
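Illustrative aside, not part of the patch: the properties struct above is an output-only struct that rides on QueueFamilyProperties2. A minimal sketch, assuming the structure-chain overload of getQueueFamilyProperties2 (not shown in this hunk) and an assumed physicalDevice:

    // Sketch only: read the global priorities supported by each queue family.
    auto queueFamilyChains = physicalDevice.getQueueFamilyProperties2<
        vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyGlobalPriorityPropertiesEXT>>();
    for ( auto const & chain : queueFamilyChains )
    {
      auto const & globalPriorities = chain.get<vk::QueueFamilyGlobalPriorityPropertiesEXT>();
      // the first globalPriorities.priorityCount entries of globalPriorities.priorities are valid
    }
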
struct RenderPassAttachmentBeginInfo
{
static const bool allowDuplicate = false;
@@ -101020,103 +101470,97 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_VERSION_1_0 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(
- const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+ enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PhysicalDeviceAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+ enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
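Illustrative aside, not part of the patch: the enhanced-mode overload above wraps the usual two-call enumeration. A minimal sketch, assuming an already-created vk::Instance named instance:

    // Sketch only: pick up every physical device and peek at its properties.
    std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
    for ( vk::PhysicalDevice const & pd : physicalDevices )
    {
      vk::PhysicalDeviceProperties properties = pd.getProperties();
      // inspect properties.deviceName, properties.apiVersion, ...
    }
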
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(
- const DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ PFN_vkVoidFunction
+ getProcAddr( const char * pName,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
- createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ PFN_vkVoidFunction
+ getProcAddr( const std::string & name,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(
+ uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PhysicalDeviceGroupPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(
- const DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
- createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ void
+ destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_display ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR(
@@ -101142,118 +101586,130 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
-#if defined( VK_USE_PLATFORM_IOS_MVK )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
+ createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo,
+ createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo,
+ createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_KHR_android_surface ===
-#if defined( VK_USE_PLATFORM_MACOS_MVK )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_win32_surface ===
-#if defined( VK_USE_PLATFORM_METAL_EXT )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
+ createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo,
+ createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
@@ -101261,41 +101717,87 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo,
+ createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_EXT_debug_report ===
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(
+ const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(
+ const DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
+ createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyDebugReportCallbackEXT(
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+ uint64_t object,
+ size_t location,
+ int32_t messageCode,
+ const char * pLayerPrefix,
+ const char * pMessage,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+ uint64_t object,
+ size_t location,
+ int32_t messageCode,
+ const std::string & layerPrefix,
+ const std::string & message,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
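Illustrative aside, not part of the patch: a minimal VK_EXT_debug_report hookup using the declarations above. The callback signature comes from PFN_vkDebugReportCallbackEXT; instance is an assumed vk::Instance created with the extension enabled.

    // Sketch only: a do-nothing callback matching PFN_vkDebugReportCallbackEXT.
    VKAPI_ATTR VkBool32 VKAPI_CALL debugReportCallback( VkDebugReportFlagsEXT /*flags*/,
                                                        VkDebugReportObjectTypeEXT /*objectType*/,
                                                        uint64_t /*object*/,
                                                        size_t /*location*/,
                                                        int32_t /*messageCode*/,
                                                        const char * /*layerPrefix*/,
                                                        const char * /*message*/,
                                                        void * /*userData*/ )
    {
      // forward layerPrefix/message to a logger of choice here
      return VK_FALSE;  // do not abort the Vulkan call that triggered the report
    }

    // Sketch only: register it; the unique handle unregisters itself on destruction.
    vk::DebugReportCallbackCreateInfoEXT createInfo(
        vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &debugReportCallback );
    auto callback = instance.createDebugReportCallbackEXTUnique( createInfo );
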
#if defined( VK_USE_PLATFORM_GGP )
+ //=== VK_GGP_stream_descriptor_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP(
const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
@@ -101322,6 +101824,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_GGP*/
#if defined( VK_USE_PLATFORM_VI_NN )
+ //=== VK_NN_vi_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
@@ -101346,153 +101850,109 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_VI_NN*/
-#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+ //=== VK_KHR_device_group_creation ===
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(
+ uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B = PhysicalDeviceGroupPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
+ int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
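// --- Editor's usage sketch, not part of vulkan.hpp: how the enumeratePhysicalDeviceGroupsKHR
// --- wrapper declared above might be called in enhanced mode. Assumes an `instance` created
// --- with VK_KHR_device_group_creation enabled (on Vulkan 1.1+ the non-KHR alias is core);
// --- function and variable names below are illustrative only.
#include <vulkan/vulkan.hpp>

uint32_t countGroupedDevices( vk::Instance instance )
{
  // A dynamic dispatcher guarantees the KHR entry point is actually loaded.
  vk::DispatchLoaderDynamic dld( instance, vkGetInstanceProcAddr );

  // Enhanced mode returns the complete vector and throws vk::SystemError on failure.
  std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroupsKHR( dld );

  uint32_t total = 0;
  for ( auto const & group : groups )
  {
    total += group.physicalDeviceCount;  // devices in this group can be used together
  }
  return total;
}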
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ //=== VK_MVK_ios_surface ===
-#if defined( VK_USE_PLATFORM_XCB_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
+ createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo,
+ createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo,
+ createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo,
Optional<const AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ //=== VK_MVK_macos_surface ===
-#if defined( VK_USE_PLATFORM_XLIB_KHR )
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
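// --- Editor's usage sketch, not part of vulkan.hpp: creating a VkSurfaceKHR from an NSView via
// --- the VK_MVK_macos_surface wrapper above. Assumes a MoltenVK build with
// --- VK_USE_PLATFORM_MACOS_MVK defined, an `instance` created with the extension enabled, and a
// --- `pView` pointing at a layer-backed NSView; all names are illustrative.
#if defined( VK_USE_PLATFORM_MACOS_MVK )
#  include <vulkan/vulkan.hpp>

vk::SurfaceKHR makeMacOSSurface( vk::Instance instance, const void * pView )
{
  vk::DispatchLoaderDynamic       dld( instance, vkGetInstanceProcAddr );
  vk::MacOSSurfaceCreateInfoMVK   createInfo( {}, pView );
  // Enhanced mode returns the handle directly and throws vk::SystemError on failure;
  // the surface is destroyed later with instance.destroySurfaceKHR().
  return instance.createMacOSSurfaceMVK( createInfo, nullptr, dld );
}
#endif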
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char * pLayerPrefix,
- const char * pMessage,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const std::string & layerPrefix,
- const std::string & message,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(
+ const DebugUtilsMessengerCreateInfoEXT & createInfo,
Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
+ createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
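// --- Editor's usage sketch, not part of vulkan.hpp: installing a VK_EXT_debug_utils messenger
// --- through the createDebugUtilsMessengerEXT wrapper declared above. Assumes `instance` was
// --- created with VK_EXT_debug_utils enabled; the callback and function names are illustrative.
#include <vulkan/vulkan.hpp>
#include <cstdio>

static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
                                                     VkDebugUtilsMessageTypeFlagsEXT /*types*/,
                                                     const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                     void * /*pUserData*/ )
{
  std::printf( "debug utils: %s\n", pCallbackData->pMessage );
  return VK_FALSE;  // do not abort the call that triggered the message
}

vk::DebugUtilsMessengerEXT installMessenger( vk::Instance instance, vk::DispatchLoaderDynamic const & dld )
{
  vk::DebugUtilsMessengerCreateInfoEXT createInfo(
    {},
    vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
    vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
    &debugCallback );
  // The extension entry points are not exported by the loader, so an explicit
  // (dynamic) dispatcher is passed; throws vk::SystemError on failure.
  return instance.createDebugUtilsMessengerEXT( createInfo, nullptr, dld );
}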
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
@@ -101519,125 +101979,157 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const DebugUtilsMessengerCallbackDataEXT & callbackData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+ VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
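// --- Editor's usage sketch, not part of vulkan.hpp: injecting an application message into the
// --- VK_EXT_debug_utils stream via the submitDebugUtilsMessageEXT wrapper above, so it reaches
// --- every installed messenger. Assumes the extension is enabled on `instance`; names are
// --- illustrative.
#include <vulkan/vulkan.hpp>

void reportAppMessage( vk::Instance instance, vk::DispatchLoaderDynamic const & dld, const char * text )
{
  vk::DebugUtilsMessengerCallbackDataEXT data;
  data.setPMessageIdName( "app" ).setPMessage( text );
  instance.submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT::eInfo,
                                       vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral,
                                       data,
                                       dld );
}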
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_imagepipe_surface ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceGroupPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceGroupPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_headless_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result
- enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
- VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+ createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator,
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
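// --- Editor's usage sketch, not part of vulkan.hpp: creating a window-system-independent surface
// --- through the VK_EXT_headless_surface wrapper above, e.g. for automated testing. Assumes
// --- `instance` was created with the extension enabled; names are illustrative.
#include <vulkan/vulkan.hpp>

vk::SurfaceKHR makeHeadlessSurface( vk::Instance instance, vk::DispatchLoaderDynamic const & dld )
{
  // HeadlessSurfaceCreateInfoEXT carries nothing but (reserved) flags; the returned
  // surface is destroyed later with instance.destroySurfaceKHR().
  return instance.createHeadlessSurfaceEXT( vk::HeadlessSurfaceCreateInfoEXT{}, nullptr, dld );
}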
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const char * pName,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const std::string & name,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ VULKAN_HPP_NODISCARD Result
+ createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const DebugUtilsMessengerCallbackDataEXT & callbackData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
{
@@ -101685,6 +102177,8 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
+ //=== VK_VERSION_1_0 ===
+
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
class UniqueHandleTraits<Instance, Dispatch>
@@ -101759,6 +102253,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_1 ===
+
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result enumerateInstanceVersion(
uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
@@ -101768,6 +102264,12 @@ namespace VULKAN_HPP_NAMESPACE
enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //===========================
+ //=== COMMAND Definitions ===
+ //===========================
+
+ //=== VK_VERSION_1_0 ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
@@ -101819,6 +102321,370 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyInstance( m_instance,
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkEnumeratePhysicalDevices(
+ m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+ Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+ {
+ std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+ uint32_t physicalDeviceCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ {
+ physicalDevices.resize( physicalDeviceCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
+ m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
+ {
+ physicalDevices.resize( physicalDeviceCount );
+ }
+ return createResultValue(
+ result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ }
+
+ template <typename PhysicalDeviceAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+ Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+ {
+ std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+ uint32_t physicalDeviceCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ {
+ physicalDevices.resize( physicalDeviceCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
+ m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
+ {
+ physicalDevices.resize( physicalDeviceCount );
+ }
+ return createResultValue(
+ result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
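// --- Editor's usage sketch, not part of vulkan.hpp: the enhanced enumeratePhysicalDevices()
// --- defined above hides the count/allocate/fill dance and the VK_INCOMPLETE retry loop.
// --- Assumes exceptions are enabled (the default); names are illustrative.
#include <vulkan/vulkan.hpp>
#include <stdexcept>

vk::PhysicalDevice pickFirstGpu( vk::Instance instance )
{
  std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();  // throws vk::SystemError on error
  if ( gpus.empty() )
  {
    throw std::runtime_error( "no Vulkan-capable device found" );
  }
  return gpus.front();
}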
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
+ PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+ d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+ return features;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceFormatProperties(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
+ PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+ return formatProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
+ PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
+ return createResultValue(
+ result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
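// --- Editor's usage sketch, not part of vulkan.hpp: the enhanced getImageFormatProperties()
// --- defined above throws (e.g. on VK_ERROR_FORMAT_NOT_SUPPORTED) instead of returning a
// --- VkResult, so a support probe becomes a try/catch. Names are illustrative.
#include <vulkan/vulkan.hpp>

bool supportsSampledColorAttachment( vk::PhysicalDevice gpu, vk::Format format )
{
  try
  {
    vk::ImageFormatProperties props =
      gpu.getImageFormatProperties( format,
                                    vk::ImageType::e2D,
                                    vk::ImageTiling::eOptimal,
                                    vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eColorAttachment,
                                    vk::ImageCreateFlags() );
    return props.maxExtent.width > 0;  // reached only if the combination is supported
  }
  catch ( vk::SystemError const & )
  {
    return false;  // e.g. vk::FormatNotSupportedError
  }
}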
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
+ PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+ d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice,
+ pQueueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
+ {
+ std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
+ }
+
+ template <typename QueueFamilyPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties(
+ queueFamilyPropertiesAllocator );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
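// --- Editor's usage sketch, not part of vulkan.hpp: the enhanced getQueueFamilyProperties()
// --- defined above returns the full vector directly, so the usual "find a graphics queue
// --- family" scan is a simple loop. Names are illustrative.
#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <stdexcept>

uint32_t findGraphicsQueueFamily( vk::PhysicalDevice gpu )
{
  std::vector<vk::QueueFamilyProperties> families = gpu.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < families.size(); ++i )
  {
    if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
    {
      return i;
    }
  }
  throw std::runtime_error( "no graphics-capable queue family" );
}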
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
+ PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+ return memoryProperties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetInstanceProcAddr( m_instance, pName );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceProcAddr( m_device, pName );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
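// --- Editor's usage sketch, not part of vulkan.hpp: the getProcAddr() wrappers defined above are
// --- thin forwards to vkGetInstanceProcAddr / vkGetDeviceProcAddr; the returned pointer still has
// --- to be cast to the right PFN type by the caller. Names are illustrative.
#include <vulkan/vulkan.hpp>

PFN_vkSetDebugUtilsObjectNameEXT loadSetObjectName( vk::Device device )
{
  // Returns nullptr if the entry point is unknown (e.g. the extension was not enabled).
  return reinterpret_cast<PFN_vkSetDebugUtilsObjectNameEXT>(
    device.getProcAddr( "vkSetDebugUtilsObjectNameEXT" ) );
}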
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Device * pDevice,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
+ reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDevice *>( pDevice ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
+ PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::Device device;
+ Result result = static_cast<Result>(
+ d.vkCreateDevice( m_physicalDevice,
+ reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDevice *>( &device ) ) );
+ return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
+ PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::Device device;
+ Result result = static_cast<Result>(
+ d.vkCreateDevice( m_physicalDevice,
+ reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDevice *>( &device ) ) );
+ ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>(
+ result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
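// --- Editor's usage sketch, not part of vulkan.hpp: creating a logical device with one queue
// --- through the createDeviceUnique() wrapper defined above; the UniqueDevice destroys itself
// --- when it goes out of scope. queueFamilyIndex is assumed to come from a prior queue-family
// --- query; names are illustrative.
#include <vulkan/vulkan.hpp>
#include <cstdint>

vk::UniqueDevice makeDevice( vk::PhysicalDevice gpu, uint32_t queueFamilyIndex )
{
  float                     priority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
  vk::DeviceCreateInfo      createInfo( {}, 1, &queueInfo );
  return gpu.createDeviceUnique( createInfo );  // throws vk::SystemError on failure
}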
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDevice( m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
enumerateInstanceExtensionProperties( const char * pLayerName,
uint32_t * pPropertyCount,
@@ -101898,6 +102764,87 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+ m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename ExtensionPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
+ Dispatch const & d ) const
+ {
+ std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+ m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
+ layerName ? layerName->c_str() : nullptr,
+ &propertyCount,
+ reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ }
+
+ template <typename ExtensionPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
+ ExtensionPropertiesAllocator & extensionPropertiesAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+ m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
+ layerName ? layerName->c_str() : nullptr,
+ &propertyCount,
+ reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
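// --- Editor's usage sketch, not part of vulkan.hpp: the enhanced
// --- enumerateDeviceExtensionProperties() defined above already performs the VK_INCOMPLETE retry
// --- loop, so checking for an extension reduces to scanning the returned vector. Names are
// --- illustrative.
#include <vulkan/vulkan.hpp>
#include <cstring>

bool supportsSwapchain( vk::PhysicalDevice gpu )
{
  for ( vk::ExtensionProperties const & ext : gpu.enumerateDeviceExtensionProperties() )
  {
    if ( std::strcmp( ext.extensionName.data(), VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0 )
    {
      return true;
    }
  }
  return false;
}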
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
Dispatch const & d ) VULKAN_HPP_NOEXCEPT
@@ -101964,2326 +102911,3597 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion,
- Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+ return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
+ template <typename LayerPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
{
- uint32_t apiVersion;
- Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
- return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+ std::vector<LayerProperties, LayerPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ }
+
+ template <typename LayerPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin(
- const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex,
+ uint32_t queueIndex,
+ VULKAN_HPP_NAMESPACE::Queue * pQueue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+ d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
+ Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+ VULKAN_HPP_NAMESPACE::Queue queue;
+ d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+ return queue;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginConditionalRenderingEXT(
- m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+ return static_cast<Result>( d.vkQueueSubmit(
+ m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
{
- d.vkCmdBeginConditionalRenderingEXT(
- m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+ Result result = static_cast<Result>( d.vkQueueSubmit( m_queue,
+ submits.size(),
+ reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
+ static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Queue::waitIdle( Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+ return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::waitIdle( Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginQuery(
- m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ return static_cast<Result>( d.vkAllocateMemory( m_device,
+ reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- uint32_t index,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
+ Device::allocateMemory( const MemoryAllocateInfo & allocateInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdBeginQueryIndexedEXT(
- m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+ Result result = static_cast<Result>(
+ d.vkAllocateMemory( m_device,
+ reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
+ return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
+ Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdBeginRenderPass( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- static_cast<VkSubpassContents>( contents ) );
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+ Result result = static_cast<Result>(
+ d.vkAllocateMemory( m_device,
+ reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
+ ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>(
+ result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
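A minimal usage sketch of the enhanced-mode allocateMemory overload added above, assuming exceptions are enabled and that a valid vk::Device `device` and a suitable `memoryTypeIndex` (both hypothetical, chosen elsewhere by the application) already exist:

#include <vulkan/vulkan.hpp>

// Allocate a 64 KiB block; with exceptions enabled this throws vk::SystemError on failure
// instead of returning a VkResult.
vk::DeviceMemory allocateBlock( vk::Device device, uint32_t memoryTypeIndex )
{
  vk::MemoryAllocateInfo allocateInfo( 64 * 1024, memoryTypeIndex );
  return device.allocateMemory( allocateInfo );
}

// With smart handles enabled, allocateMemoryUnique returns a vk::UniqueDeviceMemory
// that frees the allocation automatically when it goes out of scope.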
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkFreeMemory(
+ m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- static_cast<VkSubpassContents>( contents ) );
+ d.vkFreeMemory( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass2( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ d.vkFreeMemory(
+ m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
- const SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass2( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+ d.vkFreeMemory( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+ void ** ppData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ return static_cast<Result>( d.vkMapMemory( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkMemoryMapFlags>( flags ),
+ ppData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
- const SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void *>::type
+ Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+ Dispatch const & d ) const
{
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+ void * pData;
+ Result result = static_cast<Result>( d.vkMapMemory( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkMemoryMapFlags>( flags ),
+ &pData ) );
+ return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
- firstCounterBuffer,
- counterBufferCount,
- reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+ d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
}
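A sketch pairing the enhanced-mode mapMemory overload above with unmapMemory, under the same include and exception assumptions as the earlier sketch, and assuming `memory` refers to host-visible, host-coherent memory:

#include <cstring>

// Copy `size` bytes from `src` into a mapped allocation; flags default to an empty vk::MemoryMapFlags.
void uploadBytes( vk::Device device, vk::DeviceMemory memory, const void * src, vk::DeviceSize size )
{
  void * mapped = device.mapMemory( memory, 0, size );
  std::memcpy( mapped, src, static_cast<size_t>( size ) );
  device.unmapMemory( memory );
}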
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
-# else
- if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
- {
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ return static_cast<Result>( d.vkFlushMappedMemoryRanges(
+ m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ }
- d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
- firstCounterBuffer,
- counterBuffers.size(),
- reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges(
+ m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
+ return static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
+ m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::invalidateMappedMemoryRanges(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
{
- d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+ Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
+ m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
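The ArrayProxy overloads above also accept a single element, so flushing one range needs no container; a sketch assuming a non-coherent allocation `memory` obtained as in the earlier sketches:

// Make host writes to the whole allocation visible to the device.
void flushWholeAllocation( vk::Device device, vk::DeviceMemory memory )
{
  vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  device.flushMappedMemoryRanges( range );
}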
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t * pDynamicOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindDescriptorSets( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipelineLayout>( layout ),
- firstSet,
- descriptorSetCount,
- reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
- dynamicOffsetCount,
- pDynamicOffsets );
+ d.vkGetDeviceMemoryCommitment(
+ m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- ArrayProxy<const uint32_t> const & dynamicOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
+ Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindDescriptorSets( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipelineLayout>( layout ),
- firstSet,
- descriptorSets.size(),
- reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
- dynamicOffsets.size(),
- dynamicOffsets.data() );
+ VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+ d.vkGetDeviceMemoryCommitment(
+ m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+ return committedMemoryInBytes;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::IndexType indexType,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindIndexBuffer( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkIndexType>( indexType ) );
+ return static_cast<Result>( d.vkBindBufferMemory( m_device,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( memoryOffset ) ) );
}
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindBufferMemory( m_device,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindPipeline(
- m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ return static_cast<Result>( d.vkBindImageMemory( m_device,
+ static_cast<VkImage>( image ),
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( memoryOffset ) ) );
}
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindImageMemory( m_device,
+ static_cast<VkImage>( image ),
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t groupIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipeline>( pipeline ),
- groupIndex );
+ d.vkGetBufferMemoryRequirements(
+ m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+ Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindShadingRateImageNV(
- m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+ d.vkGetBufferMemoryRequirements(
+ m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ return memoryRequirements;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
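A sketch tying getBufferMemoryRequirements to the bindBufferMemory overload above; `memoryTypeIndex` is assumed to be picked by the caller from requirements.memoryTypeBits:

// Give a buffer its own backing allocation and bind it at offset 0.
vk::DeviceMemory backBuffer( vk::Device device, vk::Buffer buffer, uint32_t memoryTypeIndex )
{
  vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
  vk::DeviceMemory memory =
    device.allocateMemory( vk::MemoryAllocateInfo( requirements.size, memoryTypeIndex ) );
  device.bindBufferMemory( buffer, memory, 0 );
  return memory;
}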
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
- firstBinding,
- bindingCount,
- reinterpret_cast<const VkBuffer *>( pBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pOffsets ),
- reinterpret_cast<const VkDeviceSize *>( pSizes ) );
+ d.vkGetImageMemoryRequirements(
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+ Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
- VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
-# else
- if ( buffers.size() != offsets.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
- }
- if ( !sizes.empty() && buffers.size() != sizes.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-
- d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
- firstBinding,
- buffers.size(),
- reinterpret_cast<const VkBuffer *>( buffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
- reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+ d.vkGetImageMemoryRequirements(
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ return memoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements(
+ VULKAN_HPP_NAMESPACE::Image image,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindVertexBuffers( m_commandBuffer,
- firstBinding,
- bindingCount,
- reinterpret_cast<const VkBuffer *>( pBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+ d.vkGetImageSparseMemoryRequirements(
+ m_device,
+ static_cast<VkImage>( image ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
-# else
- if ( buffers.size() != offsets.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements(
+ m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements(
+ m_device,
+ static_cast<VkImage>( image ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ return sparseMemoryRequirements;
+ }
- d.vkCmdBindVertexBuffers( m_commandBuffer,
- firstBinding,
- buffers.size(),
- reinterpret_cast<const VkBuffer *>( buffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+ template <
+ typename SparseImageMemoryRequirementsAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ Device::getImageSparseMemoryRequirements(
+ VULKAN_HPP_NAMESPACE::Image image,
+ SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
+ sparseImageMemoryRequirementsAllocator );
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements(
+ m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements(
+ m_device,
+ static_cast<VkImage>( image ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
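The vector-returning overload above handles the usual two-call enumeration pattern internally; a sketch assuming a sparse-resident `image`:

// Sum the mip-tail sizes reported for each aspect of a sparse image.
vk::DeviceSize totalMipTailSize( vk::Device device, vk::Image image )
{
  vk::DeviceSize total = 0;
  for ( const vk::SparseImageMemoryRequirements & req : device.getImageSparseMemoryRequirements( image ) )
    total += req.imageMipTailSize;
  return total;
}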
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
- firstBinding,
- bindingCount,
- reinterpret_cast<const VkBuffer *>( pBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pOffsets ),
- reinterpret_cast<const VkDeviceSize *>( pSizes ),
- reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
- VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
- VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
-# else
- if ( buffers.size() != offsets.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
- }
- if ( !sizes.empty() && buffers.size() != sizes.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
- }
- if ( !strides.empty() && buffers.size() != strides.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
+ }
- d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
- firstBinding,
- buffers.size(),
- reinterpret_cast<const VkBuffer *>( buffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
- reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
- reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+ template <
+ typename SparseImageFormatPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ PhysicalDevice::getSparseImageFormatProperties(
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties(
+ sparseImageFormatPropertiesAllocator );
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
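A sketch of the vector-returning getSparseImageFormatProperties overload above; the format, image type, sample count, usage, and tiling values are illustrative choices, not anything prescribed by this diff:

// Query sparse-image format properties for a sampled 2D RGBA8 image with optimal tiling.
std::vector<vk::SparseImageFormatProperties> querySparseFormatProperties( vk::PhysicalDevice physicalDevice )
{
  return physicalDevice.getSparseImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
                                                        vk::ImageType::e2D,
                                                        vk::SampleCountFlagBits::e1,
                                                        vk::ImageUsageFlagBits::eSampled,
                                                        vk::ImageTiling::eOptimal );
}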
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
- VULKAN_HPP_NAMESPACE::Filter filter,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Queue::bindSparse( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBlitImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regionCount,
- reinterpret_cast<const VkImageBlit *>( pRegions ),
- static_cast<VkFilter>( filter ) );
+ return static_cast<Result>( d.vkQueueBindSparse( m_queue,
+ bindInfoCount,
+ reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ),
+ static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
- VULKAN_HPP_NAMESPACE::Filter filter,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
{
- d.vkCmdBlitImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regions.size(),
- reinterpret_cast<const VkImageBlit *>( regions.data() ),
- static_cast<VkFilter>( filter ) );
+ Result result =
+ static_cast<Result>( d.vkQueueBindSparse( m_queue,
+ bindInfo.size(),
+ reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
+ static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
+ return static_cast<Result>( d.vkCreateFence( m_device,
+ reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ Device::createFence( const FenceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkCreateFence( m_device,
+ reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ Device::createFenceUnique( const FenceCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
- reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
- static_cast<VkBuffer>( instanceData ),
- static_cast<VkDeviceSize>( instanceOffset ),
- static_cast<VkBool32>( update ),
- static_cast<VkAccelerationStructureNV>( dst ),
- static_cast<VkAccelerationStructureNV>( src ),
- static_cast<VkBuffer>( scratch ),
- static_cast<VkDeviceSize>( scratchOffset ) );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkCreateFence( m_device,
+ reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
+ result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyFence(
+ m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
- reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
- static_cast<VkBuffer>( instanceData ),
- static_cast<VkDeviceSize>( instanceOffset ),
- static_cast<VkBool32>( update ),
- static_cast<VkAccelerationStructureNV>( dst ),
- static_cast<VkAccelerationStructureNV>( src ),
- static_cast<VkBuffer>( scratch ),
- static_cast<VkDeviceSize>( scratchOffset ) );
+ d.vkDestroyFence( m_device,
+ static_cast<VkFence>( fence ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
- const uint32_t * pIndirectStrides,
- const uint32_t * const * ppMaxPrimitiveCounts,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBuildAccelerationStructuresIndirectKHR(
- m_commandBuffer,
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
- pIndirectStrides,
- ppMaxPrimitiveCounts );
+ d.vkDestroyFence(
+ m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
- VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
- VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
-# else
- if ( infos.size() != indirectDeviceAddresses.size() )
- {
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
- }
- if ( infos.size() != indirectStrides.size() )
- {
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
- }
- if ( infos.size() != pMaxPrimitiveCounts.size() )
- {
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-
- d.vkCmdBuildAccelerationStructuresIndirectKHR(
- m_commandBuffer,
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
- indirectStrides.data(),
- pMaxPrimitiveCounts.data() );
+ d.vkDestroyFence( m_device,
+ static_cast<VkFence>( fence ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBuildAccelerationStructuresKHR(
- m_commandBuffer,
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
+ return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
-# else
- if ( infos.size() != pBuildRangeInfos.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ Result result = static_cast<Result>(
+ d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- d.vkCmdBuildAccelerationStructuresKHR(
- m_commandBuffer,
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
- const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
- uint32_t rectCount,
- const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdClearAttachments( m_commandBuffer,
- attachmentCount,
- reinterpret_cast<const VkClearAttachment *>( pAttachments ),
- rectCount,
- reinterpret_cast<const VkClearRect *>( pRects ) );
+ return static_cast<Result>( d.vkWaitForFences(
+ m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d ) const
{
- d.vkCmdClearAttachments( m_commandBuffer,
- attachments.size(),
- reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
- rects.size(),
- reinterpret_cast<const VkClearRect *>( rects.data() ) );
+ Result result = static_cast<Result>( d.vkWaitForFences( m_device,
+ fences.size(),
+ reinterpret_cast<const VkFence *>( fences.data() ),
+ static_cast<VkBool32>( waitAll ),
+ timeout ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
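Because eTimeout is a success code, the enhanced waitForFences overload above still returns a vk::Result rather than void; a sketch assuming exceptions are enabled and that `submit` is a caller-supplied callable (hypothetical) which queues work signaling the fence:

#include <cstdint>
#include <functional>

// Create a fence, let the caller submit work that signals it, then block until it fires.
bool submitAndWait( vk::Device device, const std::function<void( vk::Fence )> & submit )
{
  vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
  submit( fence );
  vk::Result result = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
  device.destroyFence( fence );
  return result == vk::Result::eSuccess;
}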
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
- uint32_t rangeCount,
- const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdClearColorImage( m_commandBuffer,
- static_cast<VkImage>( image ),
- static_cast<VkImageLayout>( imageLayout ),
- reinterpret_cast<const VkClearColorValue *>( pColor ),
- rangeCount,
- reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+ return static_cast<Result>( d.vkCreateSemaphore( m_device,
+ reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const ClearColorValue & color,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
+ Device::createSemaphore( const SemaphoreCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdClearColorImage( m_commandBuffer,
- static_cast<VkImage>( image ),
- static_cast<VkImageLayout>( imageLayout ),
- reinterpret_cast<const VkClearColorValue *>( &color ),
- ranges.size(),
- reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+ Result result = static_cast<Result>(
+ d.vkCreateSemaphore( m_device,
+ reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
+ return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
- uint32_t rangeCount,
- const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
+ Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdClearDepthStencilImage( m_commandBuffer,
- static_cast<VkImage>( image ),
- static_cast<VkImageLayout>( imageLayout ),
- reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
- rangeCount,
- reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+ Result result = static_cast<Result>(
+ d.vkCreateSemaphore( m_device,
+ reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>(
+ result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroySemaphore(
+ m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const ClearDepthStencilValue & depthStencil,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdClearDepthStencilImage( m_commandBuffer,
- static_cast<VkImage>( image ),
- static_cast<VkImageLayout>( imageLayout ),
- reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
- ranges.size(),
- reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+ d.vkDestroySemaphore( m_device,
+ static_cast<VkSemaphore>( semaphore ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdControlVideoCodingKHR( m_commandBuffer,
- reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
+ d.vkDestroySemaphore(
+ m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdControlVideoCodingKHR( m_commandBuffer,
- reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+ d.vkDestroySemaphore( m_device,
+ static_cast<VkSemaphore>( semaphore ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Event * pEvent,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+ return static_cast<Result>( d.vkCreateEvent( m_device,
+ reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkEvent *>( pEvent ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
+ Device::createEvent( const EventCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+ VULKAN_HPP_NAMESPACE::Event event;
+ Result result = static_cast<Result>(
+ d.vkCreateEvent( m_device,
+ reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkEvent *>( &event ) ) );
+ return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
+ Device::createEventUnique( const EventCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
- static_cast<VkAccelerationStructureNV>( dst ),
- static_cast<VkAccelerationStructureNV>( src ),
- static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+ VULKAN_HPP_NAMESPACE::Event event;
+ Result result = static_cast<Result>(
+ d.vkCreateEvent( m_device,
+ reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkEvent *>( &event ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>(
+ result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyAccelerationStructureToMemoryKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+ d.vkDestroyEvent(
+ m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyAccelerationStructureToMemoryKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+ d.vkDestroyEvent( m_device,
+ static_cast<VkEvent>( event ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyBuffer( m_commandBuffer,
- static_cast<VkBuffer>( srcBuffer ),
- static_cast<VkBuffer>( dstBuffer ),
- regionCount,
- reinterpret_cast<const VkBufferCopy *>( pRegions ) );
+ d.vkDestroyEvent(
+ m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyBuffer( m_commandBuffer,
- static_cast<VkBuffer>( srcBuffer ),
- static_cast<VkBuffer>( dstBuffer ),
- regions.size(),
- reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+ d.vkDestroyEvent( m_device,
+ static_cast<VkEvent>( event ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
+ return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const
{
- d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
+ Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyBufferToImage( m_commandBuffer,
- static_cast<VkBuffer>( srcBuffer ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regionCount,
- reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+ return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
{
- d.vkCmdCopyBufferToImage( m_commandBuffer,
- static_cast<VkBuffer>( srcBuffer ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regions.size(),
- reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+ Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
- const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
+ return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const
{
- d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
+ Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regionCount,
- reinterpret_cast<const VkImageCopy *>( pRegions ) );
+ return static_cast<Result>( d.vkCreateQueryPool( m_device,
+ reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+ Device::createQueryPool( const QueryPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdCopyImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regions.size(),
- reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+ Result result = static_cast<Result>(
+ d.vkCreateQueryPool( m_device,
+ reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
+ return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
+ Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+ Result result = static_cast<Result>(
+ d.vkCreateQueryPool( m_device,
+ reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>(
+ result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
+ d.vkDestroyQueryPool(
+ m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
+ d.vkDestroyQueryPool( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImageToBuffer( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkBuffer>( dstBuffer ),
- regionCount,
- reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+ d.vkDestroyQueryPool(
+ m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImageToBuffer( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkBuffer>( dstBuffer ),
- regions.size(),
- reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+ d.vkDestroyQueryPool( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
- const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ void * pData,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
+ return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ dataSize,
+ pData,
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ ArrayProxy<T> const & data,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const
{
- d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) ) );
+ return createResultValue(
+ result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+ }
+
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T, Allocator>>
+ Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) ) );
+ return createResultValue( result,
+ data,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ }
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T>
+ Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const
+ {
+ T data;
+ Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) ) );
+ return createResultValue( result,
+ data,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCopyMemoryToAccelerationStructureKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+ return static_cast<Result>( d.vkCreateBuffer( m_device,
+ reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkBuffer *>( pBuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
+ Device::createBuffer( const BufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdCopyMemoryToAccelerationStructureKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+ VULKAN_HPP_NAMESPACE::Buffer buffer;
+ Result result = static_cast<Result>(
+ d.vkCreateBuffer( m_device,
+ reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBuffer *>( &buffer ) ) );
+ return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
+ Device::createBufferUnique( const BufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdCopyQueryPoolResults( m_commandBuffer,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) );
+ VULKAN_HPP_NAMESPACE::Buffer buffer;
+ Result result = static_cast<Result>(
+ d.vkCreateBuffer( m_device,
+ reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBuffer *>( &buffer ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>(
+ result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
+ d.vkDestroyBuffer(
+ m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+ d.vkDestroyBuffer( m_device,
+ static_cast<VkBuffer>( buffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ d.vkDestroyBuffer(
+ m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+ d.vkDestroyBuffer( m_device,
+ static_cast<VkBuffer>( buffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::BufferView * pView,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ return static_cast<Result>( d.vkCreateBufferView( m_device,
+ reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkBufferView *>( pView ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
+ Device::createBufferView( const BufferViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ VULKAN_HPP_NAMESPACE::BufferView view;
+ Result result = static_cast<Result>(
+ d.vkCreateBufferView( m_device,
+ reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferView *>( &view ) ) );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
+ Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::BufferView view;
+ Result result = static_cast<Result>(
+ d.vkCreateBufferView( m_device,
+ reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferView *>( &view ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>(
+ result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyBufferView( m_device,
+ static_cast<VkBufferView>( bufferView ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+ d.vkDestroyBufferView( m_device,
+ static_cast<VkBufferView>( bufferView ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
+ d.vkDestroyBufferView( m_device,
+ static_cast<VkBufferView>( bufferView ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
+ d.vkDestroyBufferView( m_device,
+ static_cast<VkBufferView>( bufferView ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Image * pImage,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ return static_cast<Result>( d.vkCreateImage( m_device,
+ reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkImage *>( pImage ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
+ Device::createImage( const ImageCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ VULKAN_HPP_NAMESPACE::Image image;
+ Result result = static_cast<Result>(
+ d.vkCreateImage( m_device,
+ reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImage *>( &image ) ) );
+ return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
+ Device::createImageUnique( const ImageCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDispatchBaseKHR(
- m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ VULKAN_HPP_NAMESPACE::Image image;
+ Result result = static_cast<Result>(
+ d.vkCreateImage( m_device,
+ reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImage *>( &image ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>(
+ result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ d.vkDestroyImage(
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+ d.vkDestroyImage( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+ d.vkDestroyImage(
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndexedIndirect(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkDestroyImage( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ d.vkGetImageSubresourceLayout( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource *>( pSubresource ),
+ reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
+ Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
+ const ImageSubresource & subresource,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+ d.vkGetImageSubresourceLayout( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+ return layout;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ImageView * pView,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ return static_cast<Result>( d.vkCreateImageView( m_device,
+ reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkImageView *>( pView ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
+ Device::createImageView( const ImageViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDrawIndirect(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ VULKAN_HPP_NAMESPACE::ImageView view;
+ Result result = static_cast<Result>(
+ d.vkCreateImageView( m_device,
+ reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImageView *>( &view ) ) );
+ return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
- uint32_t firstInstance,
- VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
+ Device::createImageViewUnique( const ImageViewCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
- instanceCount,
- firstInstance,
- static_cast<VkBuffer>( counterBuffer ),
- static_cast<VkDeviceSize>( counterBufferOffset ),
- counterOffset,
- vertexStride );
+ VULKAN_HPP_NAMESPACE::ImageView view;
+ Result result = static_cast<Result>(
+ d.vkCreateImageView( m_device,
+ reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImageView *>( &view ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>(
+ result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndirectCount( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ d.vkDestroyImageView(
+ m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ d.vkDestroyImageView( m_device,
+ static_cast<VkImageView>( imageView ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ d.vkDestroyImageView(
+ m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkBuffer>( countBuffer ),
- static_cast<VkDeviceSize>( countBufferOffset ),
- maxDrawCount,
- stride );
+ d.vkDestroyImageView( m_device,
+ static_cast<VkImageView>( imageView ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdDrawMeshTasksIndirectNV(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ return static_cast<Result>(
+ d.vkCreateShaderModule( m_device,
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount,
- uint32_t firstTask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
+ Device::createShaderModule( const ShaderModuleCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+ VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+ Result result = static_cast<Result>(
+ d.vkCreateShaderModule( m_device,
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
+ return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
}
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
+ Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
+ VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+ Result result = static_cast<Result>(
+ d.vkCreateShaderModule( m_device,
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>(
+ result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
+ d.vkDestroyShaderModule( m_device,
+ static_cast<VkShaderModule>( shaderModule ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+ d.vkDestroyShaderModule( m_device,
+ static_cast<VkShaderModule>( shaderModule ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+ d.vkDestroyShaderModule( m_device,
+ static_cast<VkShaderModule>( shaderModule ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+ d.vkDestroyShaderModule( m_device,
+ static_cast<VkShaderModule>( shaderModule ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- uint32_t index,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+ return static_cast<Result>(
+ d.vkCreatePipelineCache( m_device,
+ reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
+ Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdEndRenderPass( m_commandBuffer );
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+ Result result = static_cast<Result>(
+ d.vkCreatePipelineCache( m_device,
+ reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
+ return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
+ Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+ Result result = static_cast<Result>(
+ d.vkCreatePipelineCache( m_device,
+ reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>(
+ result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
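  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // Shows how the createPipelineCache / createPipelineCacheUnique wrappers above are typically
  // called, assuming <vulkan/vulkan.hpp> is included with the default dispatcher and the
  // exceptions-enabled configuration. `initialData` is a hypothetical, previously serialized
  // cache blob; an empty create info is equally valid.
  vk::UniquePipelineCache makePipelineCache( vk::Device device, std::vector<uint8_t> const & initialData )
  {
    vk::PipelineCacheCreateInfo createInfo{};
    createInfo.setInitialDataSize( initialData.size() ).setPInitialData( initialData.data() );
    // With exceptions enabled this throws vk::SystemError on failure and returns the handle directly.
    return device.createPipelineCacheUnique( createInfo );
  }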
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPipelineCache( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ d.vkDestroyPipelineCache( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkDestroyPipelineCache( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ d.vkDestroyPipelineCache( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
- firstCounterBuffer,
- counterBufferCount,
- reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+ return static_cast<Result>(
+ d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ template <typename Uint8_tAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
-# else
- if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+ std::vector<uint8_t, Uint8_tAllocator> data;
+ size_t dataSize;
+ Result result;
+ do
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ result = static_cast<Result>(
+ d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+ if ( ( result == Result::eSuccess ) && dataSize )
+ {
+ data.resize( dataSize );
+ result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ &dataSize,
+ reinterpret_cast<void *>( data.data() ) ) );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+ {
+ data.resize( dataSize );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+ }
- d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
- firstCounterBuffer,
- counterBuffers.size(),
- reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+ template <typename Uint8_tAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Uint8_tAllocator & uint8_tAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+ size_t dataSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+ if ( ( result == Result::eSuccess ) && dataSize )
+ {
+ data.resize( dataSize );
+ result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ &dataSize,
+ reinterpret_cast<void *>( data.data() ) ) );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+ {
+ data.resize( dataSize );
+ }
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
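  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // The enhanced getPipelineCacheData overloads above first query the required size, resize the
  // vector, and retry while the call keeps returning eIncomplete (the size can change between the
  // two calls). A typical caller just persists the returned blob; `writeFile` below is a
  // hypothetical persistence helper, and exceptions are assumed to be enabled.
  void savePipelineCache( vk::Device device, vk::PipelineCache cache )
  {
    std::vector<uint8_t> blob = device.getPipelineCacheData( cache );  // throws on failure
    writeFile( "pipeline_cache.bin", blob );                           // hypothetical helper
  }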
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
+ return static_cast<Result>( d.vkMergePipelineCaches( m_device,
+ static_cast<VkPipelineCache>( dstCache ),
+ srcCacheCount,
+ reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
+ Dispatch const & d ) const
{
- d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
+ Result result =
+ static_cast<Result>( d.vkMergePipelineCaches( m_device,
+ static_cast<VkPipelineCache>( dstCache ),
+ srcCaches.size(),
+ reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdExecuteCommands(
- m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ return static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createGraphicsPipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <typename PipelineAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createGraphicsPipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
+ Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ return createResultValue(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename PipelineAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createGraphicsPipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <
+ typename Dispatch,
+ typename PipelineAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createGraphicsPipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
+ Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Pipeline, Dispatch>(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
+ deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
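  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // Because graphics pipeline creation can legitimately return ePipelineCompileRequiredEXT, the
  // enhanced wrappers above return a ResultValue instead of treating only eSuccess as success,
  // so the caller inspects `result` explicitly. `createInfo` is assumed to be a fully populated
  // vk::GraphicsPipelineCreateInfo built elsewhere; the default static dispatcher is assumed.
  vk::UniquePipeline makeGraphicsPipeline( vk::Device                             device,
                                           vk::PipelineCache                      cache,
                                           vk::GraphicsPipelineCreateInfo const & createInfo )
  {
    auto rv = device.createGraphicsPipelineUnique( cache, createInfo );
    if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
    {
      // e.g. fall back to a create info without FAIL_ON_PIPELINE_COMPILE_REQUIRED semantics
    }
    return std::move( rv.value );
  }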
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdExecuteCommands(
- m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+ return static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PipelineAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createComputePipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <typename PipelineAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createComputePipelines(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
- VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
+ Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
- static_cast<VkBool32>( isPreprocessed ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ return createResultValue(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename PipelineAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createComputePipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <
+ typename Dispatch,
+ typename PipelineAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createComputePipelinesUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
+ Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Pipeline, Dispatch>(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
+ deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
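  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // Minimal compute pipeline creation through the single-info convenience overload above. The
  // shader module and pipeline layout are assumed to exist already; note again the explicit
  // ResultValue, since ePipelineCompileRequiredEXT is reported as a non-throwing result code.
  vk::Pipeline makeComputePipeline( vk::Device         device,
                                    vk::PipelineCache  cache,
                                    vk::ShaderModule   shader,
                                    vk::PipelineLayout layout )
  {
    vk::ComputePipelineCreateInfo createInfo{};
    createInfo
      .setStage( vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eCompute, shader, "main" ) )
      .setLayout( layout );
    auto rv = device.createComputePipeline( cache, createInfo );
    assert( rv.result == vk::Result::eSuccess );
    return rv.value;
  }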
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyPipeline(
+ m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
- static_cast<VkBool32>( isPreprocessed ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+ d.vkDestroyPipeline( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- uint32_t data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdFillBuffer( m_commandBuffer,
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- static_cast<VkDeviceSize>( size ),
- data );
+ d.vkDestroyPipeline(
+ m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+ d.vkDestroyPipeline( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkCreatePipelineLayout( m_device,
+ reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
+ Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>(
+ d.vkCreatePipelineLayout( m_device,
+ reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
+ return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
+ Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+ Result result = static_cast<Result>(
+ d.vkCreatePipelineLayout( m_device,
+ reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>(
+ result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
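  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // createPipelineLayoutUnique above pairs naturally with a descriptor set layout created by the
  // wrappers further below; the returned UniqueHandle destroys the layout through ObjectDestroy
  // when it goes out of scope. Assumes the exceptions-enabled configuration.
  vk::UniquePipelineLayout makeLayout( vk::Device device, vk::DescriptorSetLayout setLayout )
  {
    vk::PipelineLayoutCreateInfo createInfo{};
    createInfo.setSetLayoutCount( 1 ).setPSetLayouts( &setLayout );
    return device.createPipelineLayoutUnique( createInfo );
  }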
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkDestroyPipelineLayout( m_device,
+ static_cast<VkPipelineLayout>( pipelineLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
- const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ d.vkDestroyPipelineLayout( m_device,
+ static_cast<VkPipelineLayout>( pipelineLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2KHR( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkDestroyPipelineLayout( m_device,
+ static_cast<VkPipelineLayout>( pipelineLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
- const SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2KHR( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ d.vkDestroyPipelineLayout( m_device,
+ static_cast<VkPipelineLayout>( pipelineLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Sampler * pSampler,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPipelineBarrier( m_commandBuffer,
- static_cast<VkPipelineStageFlags>( srcStageMask ),
- static_cast<VkPipelineStageFlags>( dstStageMask ),
- static_cast<VkDependencyFlags>( dependencyFlags ),
- memoryBarrierCount,
- reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
- bufferMemoryBarrierCount,
- reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
- imageMemoryBarrierCount,
- reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+ return static_cast<Result>( d.vkCreateSampler( m_device,
+ reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSampler *>( pSampler ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
+ Device::createSampler( const SamplerCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdPipelineBarrier( m_commandBuffer,
- static_cast<VkPipelineStageFlags>( srcStageMask ),
- static_cast<VkPipelineStageFlags>( dstStageMask ),
- static_cast<VkDependencyFlags>( dependencyFlags ),
- memoryBarriers.size(),
- reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
- bufferMemoryBarriers.size(),
- reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
- imageMemoryBarriers.size(),
- reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+ VULKAN_HPP_NAMESPACE::Sampler sampler;
+ Result result = static_cast<Result>(
+ d.vkCreateSampler( m_device,
+ reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSampler *>( &sampler ) ) );
+ return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
+ Device::createSamplerUnique( const SamplerCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+ VULKAN_HPP_NAMESPACE::Sampler sampler;
+ Result result = static_cast<Result>(
+ d.vkCreateSampler( m_device,
+ reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSampler *>( &sampler ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>(
+ result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
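  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // Typical sampler creation through the enhanced wrapper above; every member not set explicitly
  // keeps the zero-initialized default of vk::SamplerCreateInfo. Exceptions assumed enabled.
  vk::UniqueSampler makeLinearSampler( vk::Device device )
  {
    vk::SamplerCreateInfo createInfo{};
    createInfo.setMagFilter( vk::Filter::eLinear )
      .setMinFilter( vk::Filter::eLinear )
      .setAddressModeU( vk::SamplerAddressMode::eRepeat )
      .setAddressModeV( vk::SamplerAddressMode::eRepeat );
    return device.createSamplerUnique( createInfo );
  }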
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroySampler(
+ m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+ d.vkDestroySampler( m_device,
+ static_cast<VkSampler>( sampler ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPreprocessGeneratedCommandsNV(
- m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ d.vkDestroySampler(
+ m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPreprocessGeneratedCommandsNV(
- m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+ d.vkDestroySampler( m_device,
+ static_cast<VkSampler>( sampler ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void * pValues,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPushConstants( m_commandBuffer,
- static_cast<VkPipelineLayout>( layout ),
- static_cast<VkShaderStageFlags>( stageFlags ),
- offset,
- size,
- pValues );
+ return static_cast<Result>(
+ d.vkCreateDescriptorSetLayout( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const T> const & values,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
+ Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdPushConstants( m_commandBuffer,
- static_cast<VkPipelineLayout>( layout ),
- static_cast<VkShaderStageFlags>( stageFlags ),
- offset,
- values.size() * sizeof( T ),
- reinterpret_cast<const void *>( values.data() ) );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+ Result result = static_cast<Result>(
+ d.vkCreateDescriptorSetLayout( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
+ return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
+ Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+ Result result = static_cast<Result>(
+ d.vkCreateDescriptorSetLayout( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>(
+ result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
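  // --- Illustrative usage sketch (editor's addition, not part of the generated header diff) ---
  // One-binding descriptor set layout using the wrapper above; the binding constructor takes
  // (binding, descriptorType, descriptorCount, stageFlags). Exceptions assumed enabled.
  vk::UniqueDescriptorSetLayout makeSetLayout( vk::Device device )
  {
    vk::DescriptorSetLayoutBinding binding(
      0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex );
    vk::DescriptorSetLayoutCreateInfo createInfo{};
    createInfo.setBindingCount( 1 ).setPBindings( &binding );
    return device.createDescriptorSetLayoutUnique( createInfo );
  }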
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- descriptorWriteCount,
- reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
+ d.vkDestroyDescriptorSetLayout( m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- descriptorWrites.size(),
- reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+ d.vkDestroyDescriptorSetLayout( m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- pData );
+ d.vkDestroyDescriptorSetLayout( m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ d.vkDestroyDescriptorSetLayout( m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdResetEvent2KHR(
- m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) );
+ return static_cast<Result>(
+ d.vkCreateDescriptorPool( m_device,
+ reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
+ Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+ Result result = static_cast<Result>(
+ d.vkCreateDescriptorPool( m_device,
+ reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
+ return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- uint32_t regionCount,
- const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
+ Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdResolveImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regionCount,
- reinterpret_cast<const VkImageResolve *>( pRegions ) );
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+ Result result = static_cast<Result>(
+ d.vkCreateDescriptorPool( m_device,
+ reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>(
+ result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
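// Annotation (illustrative sketch, not part of the diff): with enhanced mode enabled, the
// reference-taking Device::createDescriptorPool overload above returns the handle directly and
// throws on failure. The names `device` and `poolSize`, and the counts, are assumptions.
vk::DescriptorPoolSize       poolSize( vk::DescriptorType::eUniformBuffer, 16 );
vk::DescriptorPoolCreateInfo poolCreateInfo( {}, /*maxSets*/ 16, 1, &poolSize );
vk::DescriptorPool           pool = device.createDescriptorPool( poolCreateInfo );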
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDescriptorPool( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdResolveImage( m_commandBuffer,
- static_cast<VkImage>( srcImage ),
- static_cast<VkImageLayout>( srcImageLayout ),
- static_cast<VkImage>( dstImage ),
- static_cast<VkImageLayout>( dstImageLayout ),
- regions.size(),
- reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+ d.vkDestroyDescriptorPool( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
+ d.vkDestroyDescriptorPool( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
+ d.vkDestroyDescriptorPool( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ return static_cast<Result>( d.vkResetDescriptorPool(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
}
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkResetDescriptorPool(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
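// Annotation (illustrative sketch, not part of the diff): in the non-enhanced branch
// resetDescriptorPool returns a raw vk::Result, while the enhanced branch returns void and
// throws on error. `device` and `pool` are assumed handles from the sketch above.
device.resetDescriptorPool( pool, {} );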
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+ return static_cast<Result>(
+ d.vkAllocateDescriptorSets( m_device,
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DescriptorSetAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+ {
+ std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateDescriptorSets( m_device,
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
+ return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+ }
+
+ template <typename DescriptorSetAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
+ DescriptorSetAllocator & descriptorSetAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount,
+ descriptorSetAllocator );
+ Result result = static_cast<Result>(
+ d.vkAllocateDescriptorSets( m_device,
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
+ return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename DescriptorSetAllocator>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+ std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateDescriptorSets( m_device,
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+ PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+ for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
+ {
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
}
+ template <typename Dispatch,
+ typename DescriptorSetAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
+ int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
+ DescriptorSetAllocator & descriptorSetAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets(
+ descriptorSetAllocator );
+ std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateDescriptorSets( m_device,
+ reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+ PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+ for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
+ {
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
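// Annotation (illustrative sketch, not part of the diff): the Unique variant wraps each set in a
// UniqueHandle whose PoolFree deleter returns it to the pool on destruction. `pool` and
// `setLayout` are assumed handles.
vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &setLayout );
auto descriptorSets = device.allocateDescriptorSetsUnique( allocInfo );  // vector of UniqueHandle<DescriptorSet>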
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
- static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
- customSampleOrderCount,
- reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
+ return static_cast<Result>(
+ d.vkFreeDescriptorSets( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSetCount,
+ reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d ) const
{
- d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
- static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
- customSampleOrders.size(),
- reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+ Result result = static_cast<Result>(
+ d.vkFreeDescriptorSets( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSets.size(),
+ reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
- const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetColorWriteEnableEXT(
- m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
+ return static_cast<Result>(
+ d.vkFreeDescriptorSets( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSetCount,
+ reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>(
+ d.vkFreeDescriptorSets( m_device,
+ static_cast<VkDescriptorPool>( descriptorPool ),
+ descriptorSets.size(),
+ reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::updateDescriptorSets( uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetColorWriteEnableEXT(
- m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+ d.vkUpdateDescriptorSets( m_device,
+ descriptorWriteCount,
+ reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
+ descriptorCopyCount,
+ reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkUpdateDescriptorSets( m_device,
+ descriptorWrites.size(),
+ reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
+ descriptorCopies.size(),
+ reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
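// Annotation (illustrative sketch, not part of the diff): the ArrayProxy overload accepts a single
// write or a container, and an empty proxy stands in for "no copies". `set` and `buffer` are
// assumed handles.
vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
vk::WriteDescriptorSet   write( set, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
device.updateDescriptorSets( write, {} );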
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+ return static_cast<Result>( d.vkCreateFramebuffer( m_device,
+ reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
+ Device::createFramebuffer( const FramebufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+ Result result = static_cast<Result>(
+ d.vkCreateFramebuffer( m_device,
+ reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
+ return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
+ Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+ Result result = static_cast<Result>(
+ d.vkCreateFramebuffer( m_device,
+ reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>(
+ result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
- float maxDepthBounds,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+ d.vkDestroyFramebuffer( m_device,
+ static_cast<VkFramebuffer>( framebuffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+ d.vkDestroyFramebuffer( m_device,
+ static_cast<VkFramebuffer>( framebuffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+ d.vkDestroyFramebuffer( m_device,
+ static_cast<VkFramebuffer>( framebuffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+ d.vkDestroyFramebuffer( m_device,
+ static_cast<VkFramebuffer>( framebuffer ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+ return static_cast<Result>( d.vkCreateRenderPass( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass( const RenderPassCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
}
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
+ result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
- firstDiscardRectangle,
- discardRectangleCount,
- reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+ d.vkDestroyRenderPass( m_device,
+ static_cast<VkRenderPass>( renderPass ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
- firstDiscardRectangle,
- discardRectangles.size(),
- reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+ d.vkDestroyRenderPass( m_device,
+ static_cast<VkRenderPass>( renderPass ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ d.vkDestroyRenderPass( m_device,
+ static_cast<VkRenderPass>( renderPass ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetEvent2KHR( m_commandBuffer,
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+ d.vkDestroyRenderPass( m_device,
+ static_cast<VkRenderPass>( renderPass ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetRenderAreaGranularity(
+ m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- const DependencyInfoKHR & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+ Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetEvent2KHR( m_commandBuffer,
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+ VULKAN_HPP_NAMESPACE::Extent2D granularity;
+ d.vkGetRenderAreaGranularity(
+ m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+ return granularity;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
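// Annotation (illustrative sketch, not part of the diff): the enhanced overload returns the
// Extent2D by value instead of filling an out-parameter. `renderPass` is an assumed handle.
vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );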
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
- firstExclusiveScissor,
- exclusiveScissorCount,
- reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+ return static_cast<Result>( d.vkCreateCommandPool( m_device,
+ reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
+ Device::createCommandPool( const CommandPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
- firstExclusiveScissor,
- exclusiveScissors.size(),
- reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+ Result result = static_cast<Result>(
+ d.vkCreateCommandPool( m_device,
+ reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
+ return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV(
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
+ Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer,
- static_cast<VkFragmentShadingRateNV>( shadingRate ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+ Result result = static_cast<Result>(
+ d.vkCreateCommandPool( m_device,
+ reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>(
+ result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
- const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
- reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkDestroyCommandPool( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
- const Extent2D & fragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
- reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkDestroyCommandPool( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+ d.vkDestroyCommandPool( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
- uint16_t lineStipplePattern,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+ d.vkDestroyCommandPool( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+ return static_cast<Result>( d.vkResetCommandPool(
+ m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
}
-
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+ Dispatch const & d ) const
{
- d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
+ Result result = static_cast<Result>( d.vkResetCommandPool(
+ m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
+ return static_cast<Result>(
+ d.vkAllocateCommandBuffers( m_device,
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CommandBufferAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+ {
+ std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateCommandBuffers( m_device,
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
+ return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+ }
+
+ template <typename CommandBufferAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
+ CommandBufferAllocator & commandBufferAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount,
+ commandBufferAllocator );
+ Result result = static_cast<Result>(
+ d.vkAllocateCommandBuffers( m_device,
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
+ return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename CommandBufferAllocator>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
+ std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateCommandBuffers( m_device,
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+ PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+ for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
+ {
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
}
+ template <typename Dispatch,
+ typename CommandBufferAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
+ int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
+ CommandBufferAllocator & commandBufferAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers(
+ commandBufferAllocator );
+ std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ Result result = static_cast<Result>(
+ d.vkAllocateCommandBuffers( m_device,
+ reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+ PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+ for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
+ {
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
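// Annotation (illustrative sketch, not part of the diff): mirrors the descriptor-set path above;
// each UniqueHandle frees its command buffer back to the pool via PoolFree. `commandPool` is an
// assumed handle.
vk::CommandBufferAllocateInfo cbAllocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
auto commandBuffers = device.allocateCommandBuffersUnique( cbAllocInfo );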
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ d.vkFreeCommandBuffers( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ commandBufferCount,
+ reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+ d.vkFreeCommandBuffers( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ commandBuffers.size(),
+ reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+ d.vkFreeCommandBuffers( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ commandBufferCount,
+ reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+ d.vkFreeCommandBuffers( m_device,
+ static_cast<VkCommandPool>( commandPool ),
+ commandBuffers.size(),
+ reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin(
+ const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ return static_cast<Result>(
+ d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
- Dispatch const & d ) const
+ CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+ Result result = static_cast<Result>(
+ d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+ return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
}
-
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ CommandBuffer::end( Dispatch const & d ) const
{
- d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset(
+ VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+ return static_cast<Result>(
+ d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
+ {
+ Result result =
+ static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
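// Annotation (illustrative sketch, not part of the diff): in enhanced mode begin()/end() return
// void and throw on failure, so recording reads linearly. `cmd` is an assumed CommandBuffer.
cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
// ... record commands ...
cmd.end();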
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
+ d.vkCmdBindPipeline(
+ m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
- reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+ d.vkCmdSetViewport(
+ m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
- reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+ d.vkCmdSetViewport(
+ m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -104308,23 +106526,34 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+ d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
+ float depthBiasClamp,
+ float depthBiasSlopeFactor,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetScissorWithCountEXT(
- m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+ d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
+ float maxDepthBounds,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
@@ -104335,19 +106564,11 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- VULKAN_HPP_NAMESPACE::StencilOp failOp,
- VULKAN_HPP_NAMESPACE::StencilOp passOp,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ uint32_t writeMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetStencilOpEXT( m_commandBuffer,
- static_cast<VkStencilFaceFlags>( faceMask ),
- static_cast<VkStencilOp>( failOp ),
- static_cast<VkStencilOp>( passOp ),
- static_cast<VkStencilOp>( depthFailOp ),
- static_cast<VkCompareOp>( compareOp ) );
+ d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
template <typename Dispatch>
@@ -104359,262 +106580,332 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ uint32_t dynamicOffsetCount,
+ const uint32_t * pDynamicOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+ d.vkCmdBindDescriptorSets( m_commandBuffer,
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipelineLayout>( layout ),
+ firstSet,
+ descriptorSetCount,
+ reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
+ dynamicOffsetCount,
+ pDynamicOffsets );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ ArrayProxy<const uint32_t> const & dynamicOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBindDescriptorSets( m_commandBuffer,
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipelineLayout>( layout ),
+ firstSet,
+ descriptorSets.size(),
+ reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
+ dynamicOffsets.size(),
+ dynamicOffsets.data() );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
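// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// binding a single descriptor set through the enhanced-mode ArrayProxy overload above.
// `cmd`, `pipelineLayout` and `descriptorSet` are assumed, already-created handles; no dynamic
// offsets are passed, so nullptr stands in for an empty ArrayProxy.
#include <vulkan/vulkan.hpp>

void bindSingleSet( vk::CommandBuffer cmd, vk::PipelineLayout pipelineLayout, vk::DescriptorSet descriptorSet )
{
  cmd.bindDescriptorSets(
    vk::PipelineBindPoint::eGraphics, pipelineLayout, 0 /*firstSet*/, descriptorSet, nullptr /*dynamicOffsets*/ );
}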
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t writeMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::IndexType indexType,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ d.vkCmdBindIndexBuffer( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkIndexType>( indexType ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
- uint32_t vertexBindingDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
- uint32_t vertexAttributeDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetVertexInputEXT(
- m_commandBuffer,
- vertexBindingDescriptionCount,
- reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
- vertexAttributeDescriptionCount,
- reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
+ d.vkCmdBindVertexBuffers( m_commandBuffer,
+ firstBinding,
+ bindingCount,
+ reinterpret_cast<const VkBuffer *>( pBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- d.vkCmdSetVertexInputEXT(
- m_commandBuffer,
- vertexBindingDescriptions.size(),
- reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
- vertexAttributeDescriptions.size(),
- reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+# else
+ if ( buffers.size() != offsets.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdBindVertexBuffers( m_commandBuffer,
+ firstBinding,
+ buffers.size(),
+ reinterpret_cast<const VkBuffer *>( buffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
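// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// the ArrayProxy overload above requires buffers.size() == offsets.size(); otherwise it throws
// vk::LogicError, or asserts when VULKAN_HPP_NO_EXCEPTIONS is defined. `cmd`, `positions` and
// `normals` are assumed handles.
#include <vulkan/vulkan.hpp>

void bindTwoVertexStreams( vk::CommandBuffer cmd, vk::Buffer positions, vk::Buffer normals )
{
  // two bindings, both starting at offset 0
  cmd.bindVertexBuffers( 0 /*firstBinding*/, { positions, normals }, { 0, 0 } );
}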
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
+ uint32_t instanceCount,
+ uint32_t firstVertex,
+ uint32_t firstInstance,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewport(
- m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
+ uint32_t instanceCount,
+ uint32_t firstIndex,
+ int32_t vertexOffset,
+ uint32_t firstInstance,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewport(
- m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+ d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer,
- firstViewport,
- viewportCount,
- reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+ d.vkCmdDrawIndirect(
+ m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportShadingRatePaletteNV(
- m_commandBuffer,
- firstViewport,
- shadingRatePalettes.size(),
- reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+ d.vkCmdDrawIndexedIndirect(
+ m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportWScalingNV( m_commandBuffer,
- firstViewport,
- viewportCount,
- reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+ d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyBuffer( m_commandBuffer,
+ static_cast<VkBuffer>( srcBuffer ),
+ static_cast<VkBuffer>( dstBuffer ),
+ regionCount,
+ reinterpret_cast<const VkBufferCopy *>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportWScalingNV( m_commandBuffer,
- firstViewport,
- viewportWScalings.size(),
- reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+ d.vkCmdCopyBuffer( m_commandBuffer,
+ static_cast<VkBuffer>( srcBuffer ),
+ static_cast<VkBuffer>( dstBuffer ),
+ regions.size(),
+ reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
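// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// a single-region, whole-buffer copy through the ArrayProxy overload above; the handles and
// `size` are assumptions.
#include <vulkan/vulkan.hpp>

void copyWholeBuffer( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
{
  vk::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
  cmd.copyBuffer( src, dst, region );  // single region passed by value through ArrayProxy
}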
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportWithCountEXT(
- m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdCopyImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regionCount,
+ reinterpret_cast<const VkImageCopy *>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdSetViewportWithCountEXT(
- m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+ d.vkCmdCopyImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regions.size(),
+ reinterpret_cast<const VkImageCopy *>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
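// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// copying mip level 0 of a color image between two images that are assumed to already be in the
// transfer-src/dst layouts; all handles and the extent are assumptions.
#include <vulkan/vulkan.hpp>

void copyColorLevel0( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, vk::Extent3D extent )
{
  vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*baseArrayLayer*/, 1 /*layerCount*/ );
  vk::ImageCopy              region( layers, vk::Offset3D( 0, 0, 0 ), layers, vk::Offset3D( 0, 0, 0 ), extent );
  cmd.copyImage( src, vk::ImageLayout::eTransferSrcOptimal, dst, vk::ImageLayout::eTransferDstOptimal, region );
}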
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
+ VULKAN_HPP_NAMESPACE::Filter filter,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdTraceRaysIndirectKHR(
- m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
- static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ d.vkCmdBlitImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regionCount,
+ reinterpret_cast<const VkImageBlit *>( pRegions ),
+ static_cast<VkFilter>( filter ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+ VULKAN_HPP_NAMESPACE::Filter filter,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdTraceRaysIndirectKHR(
- m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
- static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ d.vkCmdBlitImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regions.size(),
+ reinterpret_cast<const VkImageBlit *>( regions.data() ),
+ static_cast<VkFilter>( filter ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdTraceRaysKHR( m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
- width,
- height,
- depth );
+ d.vkCmdCopyBufferToImage( m_commandBuffer,
+ static_cast<VkBuffer>( srcBuffer ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regionCount,
+ reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdTraceRaysKHR( m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
- width,
- height,
- depth );
+ d.vkCmdCopyBufferToImage( m_commandBuffer,
+ static_cast<VkBuffer>( srcBuffer ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regions.size(),
+ reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
- VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
- VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdTraceRaysNV( m_commandBuffer,
- static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
- static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
- static_cast<VkBuffer>( missShaderBindingTableBuffer ),
- static_cast<VkDeviceSize>( missShaderBindingOffset ),
- static_cast<VkDeviceSize>( missShaderBindingStride ),
- static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
- static_cast<VkDeviceSize>( hitShaderBindingOffset ),
- static_cast<VkDeviceSize>( hitShaderBindingStride ),
- static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
- static_cast<VkDeviceSize>( callableShaderBindingOffset ),
- static_cast<VkDeviceSize>( callableShaderBindingStride ),
- width,
- height,
- depth );
+ d.vkCmdCopyImageToBuffer( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkBuffer>( dstBuffer ),
+ regionCount,
+ reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdCopyImageToBuffer( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkBuffer>( dstBuffer ),
+ regions.size(),
+ reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
@@ -104646,6 +106937,172 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ uint32_t data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdFillBuffer( m_commandBuffer,
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ static_cast<VkDeviceSize>( size ),
+ data );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
+ uint32_t rangeCount,
+ const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearColorImage( m_commandBuffer,
+ static_cast<VkImage>( image ),
+ static_cast<VkImageLayout>( imageLayout ),
+ reinterpret_cast<const VkClearColorValue *>( pColor ),
+ rangeCount,
+ reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const ClearColorValue & color,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearColorImage( m_commandBuffer,
+ static_cast<VkImage>( image ),
+ static_cast<VkImageLayout>( imageLayout ),
+ reinterpret_cast<const VkClearColorValue *>( &color ),
+ ranges.size(),
+ reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
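// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// clearing the first mip level / array layer of a color image to opaque black with the enhanced
// overload above. `cmd` and `image` are assumed handles, and the image is assumed to already be
// in eTransferDstOptimal.
#include <vulkan/vulkan.hpp>

void clearToBlack( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ClearColorValue       black( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, black, range );
}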
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
+ uint32_t rangeCount,
+ const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearDepthStencilImage( m_commandBuffer,
+ static_cast<VkImage>( image ),
+ static_cast<VkImageLayout>( imageLayout ),
+ reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
+ rangeCount,
+ reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const ClearDepthStencilValue & depthStencil,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearDepthStencilImage( m_commandBuffer,
+ static_cast<VkImage>( image ),
+ static_cast<VkImageLayout>( imageLayout ),
+ reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
+ ranges.size(),
+ reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
+ uint32_t rectCount,
+ const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearAttachments( m_commandBuffer,
+ attachmentCount,
+ reinterpret_cast<const VkClearAttachment *>( pAttachments ),
+ rectCount,
+ reinterpret_cast<const VkClearRect *>( pRects ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdClearAttachments( m_commandBuffer,
+ attachments.size(),
+ reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
+ rects.size(),
+ reinterpret_cast<const VkClearRect *>( rects.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResolveImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regionCount,
+ reinterpret_cast<const VkImageResolve *>( pRegions ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResolveImage( m_commandBuffer,
+ static_cast<VkImage>( srcImage ),
+ static_cast<VkImageLayout>( srcImageLayout ),
+ static_cast<VkImage>( dstImage ),
+ static_cast<VkImageLayout>( dstImageLayout ),
+ regions.size(),
+ reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+ }
+
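// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// signalling and later unsignalling an event from the command buffer with the wrappers above;
// `cmd` and `event` are assumed handles, and the synchronization that normally sits between the
// two calls is elided.
#include <vulkan/vulkan.hpp>

void toggleEvent( vk::CommandBuffer cmd, vk::Event event )
{
  cmd.setEvent( event, vk::PipelineStageFlagBits::eTransfer );
  // ... work that other submissions wait on via vkCmdWaitEvents ...
  cmd.resetEvent( event, vk::PipelineStageFlagBits::eTransfer );
}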
+ template <typename Dispatch>
VULKAN_HPP_INLINE void
CommandBuffer::waitEvents( uint32_t eventCount,
const VULKAN_HPP_NAMESPACE::Event * pEvents,
@@ -104699,141 +107156,78 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2KHR( uint32_t eventCount,
- const VULKAN_HPP_NAMESPACE::Event * pEvents,
- const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkCmdWaitEvents2KHR( m_commandBuffer,
- eventCount,
- reinterpret_cast<const VkEvent *>( pEvents ),
- reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
- {
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
-# else
- if ( events.size() != dependencyInfos.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-
- d.vkCmdWaitEvents2KHR( m_commandBuffer,
- events.size(),
- reinterpret_cast<const VkEvent *>( events.data() ),
- reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteAccelerationStructuresPropertiesKHR(
- m_commandBuffer,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ d.vkCmdPipelineBarrier( m_commandBuffer,
+ static_cast<VkPipelineStageFlags>( srcStageMask ),
+ static_cast<VkPipelineStageFlags>( dstStageMask ),
+ static_cast<VkDependencyFlags>( dependencyFlags ),
+ memoryBarrierCount,
+ reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
+ bufferMemoryBarrierCount,
+ reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
+ imageMemoryBarrierCount,
+ reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteAccelerationStructuresPropertiesKHR(
- m_commandBuffer,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ d.vkCmdPipelineBarrier( m_commandBuffer,
+ static_cast<VkPipelineStageFlags>( srcStageMask ),
+ static_cast<VkPipelineStageFlags>( dstStageMask ),
+ static_cast<VkDependencyFlags>( dependencyFlags ),
+ memoryBarriers.size(),
+ reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
+ bufferMemoryBarriers.size(),
+ reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
+ imageMemoryBarriers.size(),
+ reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
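// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// a single image layout transition through the ArrayProxy overload above, with no global memory
// or buffer barriers (nullptr stands in for empty ArrayProxies). `cmd` and `image` are assumed
// handles; queue family ownership is left unchanged.
#include <vulkan/vulkan.hpp>

void transitionToShaderRead( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
                                  vk::AccessFlagBits::eShaderRead,
                                  vk::ImageLayout::eTransferDstOptimal,
                                  vk::ImageLayout::eShaderReadOnlyOptimal,
                                  VK_QUEUE_FAMILY_IGNORED,
                                  VK_QUEUE_FAMILY_IGNORED,
                                  image,
                                  vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                       vk::PipelineStageFlagBits::eFragmentShader,
                       {} /*dependencyFlags*/,
                       nullptr /*memoryBarriers*/,
                       nullptr /*bufferMemoryBarriers*/,
                       barrier );
}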
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkCmdWriteAccelerationStructuresPropertiesNV(
- m_commandBuffer,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteAccelerationStructuresPropertiesNV(
- m_commandBuffer,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ d.vkCmdBeginQuery(
+ m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteBufferMarker2AMD( m_commandBuffer,
- static_cast<VkPipelineStageFlags2KHR>( stage ),
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- marker );
+ d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
- static_cast<VkPipelineStageFlagBits>( pipelineStage ),
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- marker );
+ d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
template <typename Dispatch>
@@ -104849,3262 +107243,2577 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdWriteTimestamp2KHR(
- m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdCopyQueryPoolResults( m_commandBuffer,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) );
}
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ uint32_t size,
+ const void * pValues,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+ d.vkCmdPushConstants( m_commandBuffer,
+ static_cast<VkPipelineLayout>( layout ),
+ static_cast<VkShaderStageFlags>( stageFlags ),
+ offset,
+ size,
+ pValues );
}
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::end( Dispatch const & d ) const
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ ArrayProxy<const T> const & values,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+ d.vkCmdPushConstants( m_commandBuffer,
+ static_cast<VkPipelineLayout>( layout ),
+ static_cast<VkShaderStageFlags>( stageFlags ),
+ offset,
+ values.size() * sizeof( T ),
+ reinterpret_cast<const void *>( values.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
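// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// pushing a small struct through the templated ArrayProxy overload above, where the byte size is
// computed as values.size() * sizeof( T ). The struct, `cmd` and `layout` are assumptions and must
// match the push-constant range declared in the pipeline layout.
#include <vulkan/vulkan.hpp>

struct ScaleOffset
{
  float scale[2];
  float offset[2];
};

void pushScaleOffset( vk::CommandBuffer cmd, vk::PipelineLayout layout, ScaleOffset const & data )
{
  cmd.pushConstants<ScaleOffset>( layout, vk::ShaderStageFlagBits::eVertex, 0 /*offset*/, data );
}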
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset(
- VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ d.vkCmdBeginRenderPass( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ static_cast<VkSubpassContents>( contents ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+ d.vkCmdBeginRenderPass( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
+ static_cast<VkSubpassContents>( contents ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
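// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// beginning a render pass with one color clear value through the reference overload above.
// `renderPass`, `framebuffer` and `extent` are assumed to come from the application's existing
// swapchain setup.
#include <vulkan/vulkan.hpp>

void beginPass( vk::CommandBuffer cmd, vk::RenderPass renderPass, vk::Framebuffer framebuffer, vk::Extent2D extent )
{
  vk::ClearValue          clearColor( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ), 1, &clearColor );
  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
}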
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
-# else
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
+ d.vkCmdEndRenderPass( m_commandBuffer );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
- uint32_t * pImageIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkAcquireNextImage2KHR(
- m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
+ d.vkCmdExecuteCommands(
+ m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
- Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- uint32_t imageIndex;
- Result result = static_cast<Result>( d.vkAcquireNextImage2KHR(
- m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
- return createResultValue( result,
- imageIndex,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eTimeout,
- VULKAN_HPP_NAMESPACE::Result::eNotReady,
- VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ d.vkCmdExecuteCommands(
+ m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VERSION_1_1 ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- uint32_t * pImageIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion,
+ Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- timeout,
- static_cast<VkSemaphore>( semaphore ),
- static_cast<VkFence>( fence ),
- pImageIndex ) );
+ return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
- Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
{
- uint32_t imageIndex;
- Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- timeout,
- static_cast<VkSemaphore>( semaphore ),
- static_cast<VkFence>( fence ),
- &imageIndex ) );
- return createResultValue( result,
- imageIndex,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eTimeout,
- VULKAN_HPP_NAMESPACE::Result::eNotReady,
- VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ uint32_t apiVersion;
+ Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
+ return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
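// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// querying the loader's instance-level API version through the enhanced free-function overload
// above, assuming a Vulkan 1.1+ loader, the default static dispatcher, and exceptions enabled;
// the value is decoded with the core VK_VERSION_* macros.
#include <vulkan/vulkan.hpp>
#include <cstdio>

void printInstanceVersion()
{
  uint32_t apiVersion = vk::enumerateInstanceVersion();
  std::printf( "Vulkan instance version %u.%u.%u\n",
               VK_VERSION_MAJOR( apiVersion ),
               VK_VERSION_MINOR( apiVersion ),
               VK_VERSION_PATCH( apiVersion ) );
}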
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::bindBufferMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
+ return static_cast<Result>( d.vkBindBufferMemory2(
+ m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
- Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
- Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
- return createResultValue(
- result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
+ Result result = static_cast<Result>( d.vkBindBufferMemory2(
+ m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
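// Hedged usage sketch (editorial illustration, not part of the generated header or this diff):
// binding one buffer to device memory with the ArrayProxy overload above. `device`, `buffer` and
// `memory` are assumed, already-created handles, and offset 0 is assumed to satisfy the buffer's
// alignment requirement; on failure the call throws.
#include <vulkan/vulkan.hpp>

void bindBuffer( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
{
  vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /*memoryOffset*/ );
  device.bindBufferMemory2( bindInfo );
}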
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
- Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::bindImageMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
- Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
- ObjectRelease<Device, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>(
- result,
- configuration,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique",
- deleter );
+ return static_cast<Result>(
+ d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR(
- const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+ Result result = static_cast<Result>( d.vkBindImageMemory2(
+ m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+ d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+ d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CommandBufferAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
- Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
+ uint32_t baseGroupY,
+ uint32_t baseGroupZ,
+ uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+ d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
- template <typename CommandBufferAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
- Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
- CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups(
+ uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount,
- commandBufferAllocator );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
+ m_instance,
+ pPhysicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename CommandBufferAllocator>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
- Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
{
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
- std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
{
- uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
- PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
- for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
+ result =
+ static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
{
- uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
+ m_instance,
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return createResultValue(
- result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
}
- template <typename Dispatch,
- typename CommandBufferAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
- int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
- Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
- CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d ) const
+ template <
+ typename PhysicalDeviceGroupPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroups(
+ PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers(
- commandBufferAllocator );
- std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+ physicalDeviceGroupPropertiesAllocator );
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
{
- uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
- PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
- for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
+ result =
+ static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
{
- uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
+ m_instance,
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return createResultValue(
- result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
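
As a reading aid for the enhanced-mode overloads added above, a hedged usage sketch; it assumes exceptions are enabled and that `instance` is an already-created vk::Instance.

#include <vulkan/vulkan.hpp>

// Illustrative sketch only: the wrapper runs the count/allocate/fill loop shown in the diff
// (retrying on VK_INCOMPLETE) and hands back the resized vector.
std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
for ( auto const & group : groups )
{
  // group.physicalDeviceCount handles in group.physicalDevices can form one logical device
}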
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+ d.vkGetImageMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DescriptorSetAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetImageMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return memoryRequirements;
}
- template <typename DescriptorSetAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
- DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d ) const
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount,
- descriptorSetAllocator );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetImageMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return structureChain;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
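
A hedged sketch of calling the two getImageMemoryRequirements2 overloads above; `device`, `image`, and the chained MemoryDedicatedRequirements query are assumptions for illustration.

// Plain query:
vk::MemoryRequirements2 req =
  device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );

// StructureChain variant, fetching dedicated-allocation advice in the same call:
auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2,
                                                vk::MemoryDedicatedRequirements>(
  vk::ImageMemoryRequirementsInfo2( image ) );
bool preferDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;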
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename DescriptorSetAllocator>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
- std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
- PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
- for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
- {
- uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
- }
- }
- return createResultValue(
- result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ d.vkGetBufferMemoryRequirements2( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
- template <typename Dispatch,
- typename DescriptorSetAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
- int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
- DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets(
- descriptorSetAllocator );
- std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
- PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
- for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
- {
- uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
- }
- }
- return createResultValue(
- result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetBufferMemoryRequirements2( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return memoryRequirements;
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2(
+ const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkAllocateMemory( m_device,
- reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
+ d.vkGetImageSparseMemoryRequirements2(
+ m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
- Device::allocateMemory( const MemoryAllocateInfo & allocateInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceMemory memory;
- Result result = static_cast<Result>(
- d.vkAllocateMemory( m_device,
- reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
- return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+ std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements2(
+ m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ return sparseMemoryRequirements;
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
- Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <
+ typename SparseImageMemoryRequirements2Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2(
+ const ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceMemory memory;
- Result result = static_cast<Result>(
- d.vkAllocateMemory( m_device,
- reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
- ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>(
- result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
+ std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+ sparseImageMemoryRequirements2Allocator );
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ d.vkGetImageSparseMemoryRequirements2(
+ m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ return sparseMemoryRequirements;
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
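
A brief hedged sketch of the vector-returning sparse-requirements overload above; the wrapper issues the count and fill calls itself, so the caller only sees the final vector. `device` and `image` are assumed handles.

// Illustrative sketch only:
std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
  device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );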
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
- uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
- m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
+ PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
- m_device,
- bindInfos.size(),
- reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+ return features;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
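
A hedged sketch of the two getFeatures2 forms above; chaining PhysicalDeviceVulkan12Features is just an example extension structure, assuming a Vulkan 1.2 capable `physicalDevice`.

// Plain form:
vk::PhysicalDeviceFeatures2 features = physicalDevice.getFeatures2();

// StructureChain form, querying core and Vulkan 1.2 features in one call:
auto featureChain =
  physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
bool hasTimelineSemaphore =
  featureChain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;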
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindBufferMemory( m_device,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+ PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkBindBufferMemory( m_device,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ return properties;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindBufferMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindBufferMemory2(
- m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ d.vkGetPhysicalDeviceFormatProperties2(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkBindBufferMemory2(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+ VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties2(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ return formatProperties;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ d.vkGetPhysicalDeviceFormatProperties2(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindBufferMemory2KHR(
- m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkBindBufferMemory2KHR(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+ return createResultValue(
+ result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d ) const
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+ return createResultValue(
+ result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
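
A hedged sketch of the result-returning getImageFormatProperties2 wrapper above; the format and usage values are illustrative, and with exceptions enabled an unsupported combination surfaces as a thrown vk::SystemError rather than a returned code.

// Illustrative sketch only:
vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm,
                                               vk::ImageType::e2D,
                                               vk::ImageTiling::eOptimal,
                                               vk::ImageUsageFlagBits::eSampled );
vk::ImageFormatProperties2 imageFormatProps = physicalDevice.getImageFormatProperties2( formatInfo );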
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindImageMemory( m_device,
- static_cast<VkImage>( image ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2(
+ m_physicalDevice,
+ pQueueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkBindImageMemory( m_device,
- static_cast<VkImage>( image ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+ std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindImageMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename QueueFamilyProperties2Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
+ queueFamilyProperties2Allocator );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+ PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkBindImageMemory2(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ queueFamilyProperties[i].pNext =
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ }
+ return returnVector;
+ }
+
+ template <typename StructureChain,
+ typename StructureChainAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+ PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
+ Dispatch const & d ) const
+ {
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
+ structureChainAllocator );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ queueFamilyProperties[i].pNext =
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ }
+ return returnVector;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
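
A hedged sketch of the simplest queue-family query above; the per-element StructureChain overloads added in this hunk follow the same call shape but return one chain per family. `physicalDevice` is an assumed handle.

// Illustrative sketch only:
std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
for ( auto const & family : families )
{
  vk::QueueFamilyProperties const & props = family.queueFamilyProperties;
  // props.queueFlags and props.queueCount describe this family
}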
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindImageMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkBindImageMemory2KHR(
- m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ d.vkGetPhysicalDeviceMemoryProperties2(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+ PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkBindImageMemory2KHR(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties2(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ return memoryProperties;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+ d.vkGetPhysicalDeviceMemoryProperties2(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkBindVideoSessionMemoryKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- videoSessionBindMemoryCount,
- reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- videoSessionBindMemories.size(),
- reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+ std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <
+ typename SparseImageFormatProperties2Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2(
+ const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
+ std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
+ sparseImageFormatProperties2Allocator );
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
-# else
- if ( infos.size() != pBuildRangeInfos.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-
- Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+ d.vkTrimCommandPool(
+ m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
+ VULKAN_HPP_NAMESPACE::Queue * pQueue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+ d.vkGetDeviceQueue2(
+ m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
+ Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+ VULKAN_HPP_NAMESPACE::Queue queue;
+ d.vkGetDeviceQueue2(
+ m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+ return queue;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
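
A hedged sketch of the enhanced getQueue2 wrapper above; `device` and the queue family index are assumed to exist, and empty flags make this equivalent to the classic getQueue call.

// Illustrative sketch only:
uint32_t queueFamilyIndex = 0;  // assumed to be a valid family index for this device
vk::Queue queue = device.getQueue2( vk::DeviceQueueInfo2( {}, queueFamilyIndex, 0 ) );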
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCopyAccelerationStructureKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+ d.vkCreateSamplerYcbcrConversion( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkCopyAccelerationStructureKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>(
+ d.vkCreateSamplerYcbcrConversion( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+ return createResultValue(
+ result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>(
+ d.vkCreateSamplerYcbcrConversion( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
+ result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
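
A hedged sketch of the unique-handle creation path above; it assumes the samplerYcbcrConversion feature is enabled on `device`, and the format and conversion settings are purely illustrative.

// Illustrative sketch only: the UniqueHandle destroys the conversion automatically.
vk::SamplerYcbcrConversionCreateInfo conversionInfo;
conversionInfo.format     = vk::Format::eG8B8R83Plane420Unorm;
conversionInfo.ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr709;
conversionInfo.ycbcrRange = vk::SamplerYcbcrRange::eItuNarrow;
vk::UniqueSamplerYcbcrConversion conversion =
  device.createSamplerYcbcrConversionUnique( conversionInfo );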
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroySamplerYcbcrConversion( m_device,
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
+ d.vkDestroySamplerYcbcrConversion(
m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+ d.vkDestroySamplerYcbcrConversion( m_device,
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
+ d.vkDestroySamplerYcbcrConversion(
m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate(
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateAccelerationStructureKHR( m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
+ m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
- Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator,
Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
return createResultValue(
- result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
+ result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
- Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
Optional<const AllocationCallbacks> allocator,
Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>(
+ return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
result,
- accelerationStructure,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique",
+ descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique",
deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
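  // [Editor's note] Illustrative sketch; not part of this diff. Creates a descriptor update template
  // for a single uniform-buffer binding via the Unique variant shown above. Assumes the "vk"
  // namespace, exceptions enabled, the default dispatcher, and valid device / set-layout handles.
  vk::UniqueDescriptorUpdateTemplate makeTemplateExample( vk::Device device, vk::DescriptorSetLayout setLayout )
  {
    vk::DescriptorUpdateTemplateEntry entry{};
    entry.dstBinding      = 0;
    entry.descriptorCount = 1;
    entry.descriptorType  = vk::DescriptorType::eUniformBuffer;
    entry.offset          = 0;                                    // offset of the descriptor data in the user blob
    entry.stride          = sizeof( vk::DescriptorBufferInfo );

    vk::DescriptorUpdateTemplateCreateInfo createInfo{};
    createInfo.descriptorUpdateEntryCount = 1;
    createInfo.pDescriptorUpdateEntries   = &entry;
    createInfo.templateType               = vk::DescriptorUpdateTemplateType::eDescriptorSet;
    createInfo.descriptorSetLayout        = setLayout;

    // The Unique handle destroys itself through vkDestroyDescriptorUpdateTemplate when it goes out of scope.
    return device.createDescriptorUpdateTemplateUnique( createInfo );
  }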
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateAccelerationStructureNV( m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
+ d.vkDestroyDescriptorUpdateTemplate( m_device,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
- Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
- return createResultValue(
- result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
- Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
+ d.vkDestroyDescriptorUpdateTemplate(
m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>(
- result,
- accelerationStructure,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique",
- deleter );
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateBuffer( m_device,
- reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkBuffer *>( pBuffer ) ) );
+ d.vkDestroyDescriptorUpdateTemplate( m_device,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
- Device::createBuffer( const BufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Buffer buffer;
- Result result = static_cast<Result>(
- d.vkCreateBuffer( m_device,
- reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBuffer *>( &buffer ) ) );
- return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+ d.vkDestroyDescriptorUpdateTemplate(
+ m_device,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
- Device::createBufferUnique( const BufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Buffer buffer;
- Result result = static_cast<Result>(
- d.vkCreateBuffer( m_device,
- reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBuffer *>( &buffer ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>(
- result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
+ d.vkUpdateDescriptorSetWithTemplate( m_device,
+ static_cast<VkDescriptorSet>( descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ pData );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
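  // [Editor's note] Illustrative sketch; not part of this diff. Uses a descriptor update template
  // (such as the one sketched earlier) to write a single buffer descriptor. The raw pData pointer is
  // interpreted according to the offset/stride recorded in the template entries. All handles are
  // assumed to be provided by the caller.
  void updateWithTemplateExample( vk::Device                   device,
                                  vk::DescriptorSet            set,
                                  vk::DescriptorUpdateTemplate updateTemplate,
                                  vk::Buffer                   buffer )
  {
    vk::DescriptorBufferInfo bufferInfo{};
    bufferInfo.buffer = buffer;
    bufferInfo.offset = 0;
    bufferInfo.range  = VK_WHOLE_SIZE;

    // With offset 0 and stride sizeof(DescriptorBufferInfo) in the template entry,
    // a pointer to the struct itself is a valid pData blob.
    device.updateDescriptorSetWithTemplate( set, updateTemplate, &bufferInfo );
  }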
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::BufferView * pView,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateBufferView( m_device,
- reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkBufferView *>( pView ) ) );
+ d.vkGetPhysicalDeviceExternalBufferProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
- Device::createBufferView( const BufferViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::BufferView view;
- Result result = static_cast<Result>(
- d.vkCreateBufferView( m_device,
- reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferView *>( &view ) ) );
- return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+ d.vkGetPhysicalDeviceExternalBufferProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ return externalBufferProperties;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
- Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::BufferView view;
- Result result = static_cast<Result>(
- d.vkCreateBufferView( m_device,
- reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferView *>( &view ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>(
- result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
+ d.vkGetPhysicalDeviceExternalFenceProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateCommandPool( m_device,
- reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+ d.vkGetPhysicalDeviceExternalFenceProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ return externalFenceProperties;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
- Device::createCommandPool( const CommandPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::CommandPool commandPool;
- Result result = static_cast<Result>(
- d.vkCreateCommandPool( m_device,
- reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
- return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
- Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::CommandPool commandPool;
- Result result = static_cast<Result>(
- d.vkCreateCommandPool( m_device,
- reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>(
- result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+ return externalSemaphoreProperties;
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
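  // [Editor's note] Illustrative sketch; not part of this diff. Queries whether an external semaphore
  // handle type is exportable using the core 1.1 entry point above. Assumes a valid physical device
  // and the default dispatcher; the opaque-fd handle type is just an example.
  bool isOpaqueFdSemaphoreExportable( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceExternalSemaphoreInfo info{};
    info.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;

    vk::ExternalSemaphoreProperties props = physicalDevice.getExternalSemaphoreProperties( info );
    return static_cast<bool>( props.externalSemaphoreFeatures &
                              vk::ExternalSemaphoreFeatureFlagBits::eExportable );
  }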
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ d.vkGetDescriptorSetLayoutSupport( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createComputePipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <typename PipelineAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createComputePipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
- {
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
- Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
- {
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
- }
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
- {
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
- }
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+ d.vkGetDescriptorSetLayoutSupport( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ return support;
}
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupport( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ return structureChain;
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
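  // [Editor's note] Illustrative sketch; not part of this diff. Shows both the plain and the
  // StructureChain forms of getDescriptorSetLayoutSupport above; the chained query additionally
  // retrieves the maximum variable descriptor count. Assumes the caller fills in `createInfo`.
  void layoutSupportExample( vk::Device device, vk::DescriptorSetLayoutCreateInfo const & createInfo )
  {
    vk::DescriptorSetLayoutSupport support   = device.getDescriptorSetLayoutSupport( createInfo );
    bool                           supported = ( support.supported == VK_TRUE );

    // DescriptorSetVariableDescriptorCountLayoutSupport extends DescriptorSetLayoutSupport.
    auto chain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
                                                      vk::DescriptorSetVariableDescriptorCountLayoutSupport>( createInfo );
    uint32_t maxVariableCount =
      chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;
    (void)supported;
    (void)maxVariableCount;
  }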
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateCuFunctionNVX( m_device,
- reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
- }
+ //=== VK_VERSION_1_2 ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
- Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
- Result result = static_cast<Result>(
- d.vkCreateCuFunctionNVX( m_device,
- reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
- return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
+ d.vkCmdDrawIndirectCount( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
- Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
- Result result = static_cast<Result>(
- d.vkCreateCuFunctionNVX( m_device,
- reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>(
- result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter );
+ d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
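  // [Editor's note] Illustrative sketch; not part of this diff. Records a count-buffer draw with the
  // core 1.2 entry point above. Assumes a command buffer inside a render pass with pipeline and
  // vertex/index state already bound; `indirectBuffer` holds VkDrawIndexedIndirectCommand records and
  // `countBuffer` holds a uint32_t draw count at offset 0.
  void recordIndirectDraws( vk::CommandBuffer cmd,
                            vk::Buffer        indirectBuffer,
                            vk::Buffer        countBuffer,
                            uint32_t          maxDrawCount )
  {
    cmd.drawIndexedIndirectCount( indirectBuffer,
                                  0,   // offset
                                  countBuffer,
                                  0,   // countBufferOffset
                                  maxDrawCount,
                                  static_cast<uint32_t>( sizeof( VkDrawIndexedIndirectCommand ) ) );   // stride
  }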
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+ Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
- reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
+ return static_cast<Result>( d.vkCreateRenderPass2( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
+ reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
- Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo,
Optional<const AllocationCallbacks> allocator,
Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::CuModuleNVX module;
- Result result = static_cast<Result>(
- d.vkCreateCuModuleNVX( m_device,
- reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass2( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
- return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
- Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo,
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo,
Optional<const AllocationCallbacks> allocator,
Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::CuModuleNVX module;
- Result result = static_cast<Result>(
- d.vkCreateCuModuleNVX( m_device,
- reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass2( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>(
- result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter );
+ return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
+ result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
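  // [Editor's note] Illustrative sketch; not part of this diff. Builds a minimal single-subpass render
  // pass through the core 1.2 createRenderPass2 path shown above. Assumes exceptions enabled, the "vk"
  // namespace, the default dispatcher, and a caller-chosen color format.
  vk::UniqueRenderPass makeRenderPassExample( vk::Device device, vk::Format colorFormat )
  {
    vk::AttachmentDescription2 color{};
    color.format        = colorFormat;
    color.samples       = vk::SampleCountFlagBits::e1;
    color.loadOp        = vk::AttachmentLoadOp::eClear;
    color.storeOp       = vk::AttachmentStoreOp::eStore;
    color.initialLayout = vk::ImageLayout::eUndefined;
    color.finalLayout   = vk::ImageLayout::ePresentSrcKHR;

    vk::AttachmentReference2 colorRef{};
    colorRef.attachment = 0;
    colorRef.layout     = vk::ImageLayout::eColorAttachmentOptimal;

    vk::SubpassDescription2 subpass{};
    subpass.pipelineBindPoint    = vk::PipelineBindPoint::eGraphics;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments    = &colorRef;

    vk::RenderPassCreateInfo2 createInfo{};
    createInfo.attachmentCount = 1;
    createInfo.pAttachments    = &color;
    createInfo.subpassCount    = 1;
    createInfo.pSubpasses      = &subpass;

    return device.createRenderPass2Unique( createInfo );   // RAII handle; throws vk::SystemError on failure
  }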
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
+ d.vkCmdBeginRenderPass2( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
- Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
+ const SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
- Result result = static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
- return createResultValue(
- result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
+ d.vkCmdBeginRenderPass2( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
- Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
- Result result = static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
- result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
+ d.vkCmdNextSubpass2( m_commandBuffer,
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
+ const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDescriptorPool( m_device,
- reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
+ d.vkCmdNextSubpass2( m_commandBuffer,
+ reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
- Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorPool( m_device,
- reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
- return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
+ d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
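  // [Editor's note] Illustrative sketch; not part of this diff. Drives a render pass instance with the
  // core 1.2 begin/next/end entry points above. Assumes the render pass has two subpasses and that a
  // recording command buffer, framebuffer, and extent are provided by the caller.
  void recordTwoSubpasses( vk::CommandBuffer cmd,
                           vk::RenderPass    renderPass,
                           vk::Framebuffer   framebuffer,
                           vk::Extent2D      extent )
  {
    vk::ClearValue clearColor = vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );

    vk::RenderPassBeginInfo beginInfo{};
    beginInfo.renderPass        = renderPass;
    beginInfo.framebuffer       = framebuffer;
    beginInfo.renderArea.extent = extent;
    beginInfo.clearValueCount   = 1;
    beginInfo.pClearValues      = &clearColor;

    vk::SubpassBeginInfo subpassBegin{ vk::SubpassContents::eInline };
    vk::SubpassEndInfo   subpassEnd{};

    cmd.beginRenderPass2( beginInfo, subpassBegin );
    // ... draw calls for subpass 0 ...
    cmd.nextSubpass2( subpassBegin, subpassEnd );
    // ... draw calls for subpass 1 ...
    cmd.endRenderPass2( subpassEnd );
  }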
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
- Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorPool( m_device,
- reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>(
- result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
+ d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
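  // [Editor's note] Illustrative sketch; not part of this diff. Host-side reset of queries before
  // reuse via the core 1.2 entry point above; assumes the hostQueryReset feature was enabled.
  void resetQueriesExample( vk::Device device, vk::QueryPool queryPool, uint32_t queryCount )
  {
    device.resetQueryPool( queryPool, 0 /*firstQuery*/, queryCount );
  }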
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
+ d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
- Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+ Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
- return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+ uint64_t value;
+ Result result =
+ static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+ return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
- Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>(
- result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
+ return static_cast<Result>(
+ d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+ Result result = static_cast<Result>(
+ d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
+ const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- return createResultValue(
- result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+ return static_cast<Result>(
+ d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
- result,
- descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique",
- deleter );
+ Result result = static_cast<Result>(
+ d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
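  // [Editor's note] Illustrative sketch; not part of this diff. Host-side use of a timeline semaphore
  // with the core 1.2 entry points above. Assumes exceptions enabled, the timelineSemaphore feature,
  // and a valid timeline semaphore handle.
  void timelineExample( vk::Device device, vk::Semaphore timeline )
  {
    // Signal value 1 from the host.
    vk::SemaphoreSignalInfo signalInfo{};
    signalInfo.semaphore = timeline;
    signalInfo.value     = 1;
    device.signalSemaphore( signalInfo );

    // Block until the counter reaches 1, or time out after one second (timeout is in nanoseconds).
    uint64_t              waitValue = 1;
    vk::SemaphoreWaitInfo waitInfo{};
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &timeline;
    waitInfo.pValues        = &waitValue;
    vk::Result waitResult = device.waitSemaphores( waitInfo, 1000000000ull );   // eSuccess or eTimeout

    // Read the current counter value.
    uint64_t counter = device.getSemaphoreCounterValue( timeline );
    (void)waitResult;
    (void)counter;
  }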
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- return createResultValue(
- result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
+ return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
+ const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
- result,
- descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique",
- deleter );
+ return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Event * pEvent,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateEvent( m_device,
- reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkEvent *>( pEvent ) ) );
+ return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
- Device::createEvent( const EventCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE uint64_t
+ Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Event event;
- Result result = static_cast<Result>(
- d.vkCreateEvent( m_device,
- reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkEvent *>( &event ) ) );
- return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress(
+ m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
- Device::createEventUnique( const EventCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Event event;
- Result result = static_cast<Result>(
- d.vkCreateEvent( m_device,
- reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkEvent *>( &event ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>(
- result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress(
+ m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
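
The two opaque-capture-address wrappers above are typically used together when recording addresses for capture/replay tooling. A sketch under the assumption that buffer and memory were created/allocated with the corresponding capture-replay flags:

vk::BufferDeviceAddressInfo bufferInfo;
bufferInfo.buffer = buffer;
uint64_t bufferCaptureAddress = device.getBufferOpaqueCaptureAddress( bufferInfo );

vk::DeviceMemoryOpaqueCaptureAddressInfo memoryInfo;
memoryInfo.memory = memory;
uint64_t memoryCaptureAddress = device.getMemoryOpaqueCaptureAddress( memoryInfo );
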
+
+ //=== VK_KHR_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateFence( m_device,
- reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFence *>( pFence ) ) );
+ d.vkDestroySurfaceKHR(
+ m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::createFence( const FenceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkCreateFence( m_device,
- reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+ d.vkDestroySurfaceKHR( m_instance,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::createFenceUnique( const FenceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkCreateFence( m_device,
- reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
+ d.vkDestroySurfaceKHR(
+ m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroySurfaceKHR( m_instance,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateFramebuffer( m_device,
- reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
+ queueFamilyIndex,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkBool32 *>( pSupported ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
- Device::createFramebuffer( const FramebufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
+ PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
- Result result = static_cast<Result>(
- d.vkCreateFramebuffer( m_device,
- reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
- return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+ VULKAN_HPP_NAMESPACE::Bool32 supported;
+ Result result =
+ static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
+ queueFamilyIndex,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkBool32 *>( &supported ) ) );
+ return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
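
The Bool32-returning overload above is what makes the usual present-queue search a one-liner per family. A sketch, assuming physicalDevice and surface are valid and <vector>/<cstdint> are available:

std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
uint32_t presentFamily = UINT32_MAX;
for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
{
  if ( physicalDevice.getSurfaceSupportKHR( i, surface ) )   // throws vk::SystemError on error codes
  {
    presentFamily = i;
    break;
  }
}
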
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+ m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
- Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
+ PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
- Result result = static_cast<Result>(
- d.vkCreateFramebuffer( m_device,
- reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>(
- result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+ m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
+ return createResultValue(
+ result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
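
A typical follow-up to the capabilities query above is clamping the swapchain image count; illustrative only:

vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
uint32_t imageCount = caps.minImageCount + 1;
if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )   // maxImageCount == 0 means "no upper limit"
{
  imageCount = caps.maxImageCount;
}
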
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ pSurfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename SurfaceFormatKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ }
return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
}
- template <typename PipelineAllocator,
+ template <typename SurfaceFormatKHRAllocator,
typename Dispatch,
typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ }
return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
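
Note that the enhanced overloads above loop while the driver reports eIncomplete, so callers receive a correctly sized vector without managing the count/pointer pair themselves. A selection sketch (the format preference is an assumption, not something the header prescribes):

std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
vk::SurfaceFormatKHR chosen = formats.front();
for ( vk::SurfaceFormatKHR const & f : formats )
{
  if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  {
    chosen = f;
    break;
  }
}
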
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
- Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ pPresentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PresentModeKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
+ uint32_t presentModeCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentModeCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ presentModes.resize( presentModeCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+ {
+ presentModes.resize( presentModeCount );
}
return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
}
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ template <typename PresentModeKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ PresentModeKHRAllocator & presentModeKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+ uint32_t presentModeCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentModeCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ presentModes.resize( presentModeCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+ {
+ presentModes.resize( presentModeCount );
}
return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
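
The same enumerate-and-resize pattern backs the present-mode query. A small selection sketch, assuming <algorithm> is included:

std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
vk::PresentModeKHR presentMode = vk::PresentModeKHR::eFifo;   // FIFO is always available
if ( std::find( modes.begin(), modes.end(), vk::PresentModeKHR::eMailbox ) != modes.end() )
{
  presentMode = vk::PresentModeKHR::eMailbox;
}
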
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_swapchain ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Image * pImage,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateImage( m_device,
- reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkImage *>( pImage ) ) );
+ return static_cast<Result>(
+ d.vkCreateSwapchainKHR( m_device,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
- Device::createImage( const ImageCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Image image;
- Result result = static_cast<Result>(
- d.vkCreateImage( m_device,
- reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImage *>( &image ) ) );
- return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+ Result result = static_cast<Result>(
+ d.vkCreateSwapchainKHR( m_device,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+ return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
- Device::createImageUnique( const ImageCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Image image;
- Result result = static_cast<Result>(
- d.vkCreateImage( m_device,
- reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImage *>( &image ) ) );
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+ Result result = static_cast<Result>(
+ d.vkCreateSwapchainKHR( m_device,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>(
- result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>(
+ result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
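
Pulling the surface queries together, swapchain creation through the wrappers above might look like the following; every field value is an illustrative assumption (caps, chosen, presentMode and imageCount refer to the earlier sketches):

vk::SwapchainCreateInfoKHR swapchainInfo;
swapchainInfo.surface          = surface;
swapchainInfo.minImageCount    = imageCount;
swapchainInfo.imageFormat      = chosen.format;
swapchainInfo.imageColorSpace  = chosen.colorSpace;
swapchainInfo.imageExtent      = caps.currentExtent;
swapchainInfo.imageArrayLayers = 1;
swapchainInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
swapchainInfo.preTransform     = caps.currentTransform;
swapchainInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
swapchainInfo.presentMode      = presentMode;
swapchainInfo.clipped          = VK_TRUE;

vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );
// With smart handles enabled, createSwapchainKHRUnique returns a vk::UniqueSwapchainKHR instead.
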
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ImageView * pView,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateImageView( m_device,
- reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkImageView *>( pView ) ) );
+ d.vkDestroySwapchainKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
- Device::createImageView( const ImageViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::ImageView view;
- Result result = static_cast<Result>(
- d.vkCreateImageView( m_device,
- reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImageView *>( &view ) ) );
- return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
- Device::createImageViewUnique( const ImageViewCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ImageView view;
- Result result = static_cast<Result>(
- d.vkCreateImageView( m_device,
- reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImageView *>( &view ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>(
- result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
+ d.vkDestroySwapchainKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateIndirectCommandsLayoutNV( m_device,
- reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
+ d.vkDestroySwapchainKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
- Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
- m_device,
- reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
- return createResultValue(
- result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
- Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
- m_device,
- reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
- result,
- indirectCommandsLayout,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
- deleter );
+ d.vkDestroySwapchainKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pSwapchainImageCount,
+ VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreatePipelineCache( m_device,
- reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
+ return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ pSwapchainImageCount,
+ reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
- Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename ImageAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+ Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>(
- d.vkCreatePipelineCache( m_device,
- reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
- return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+ std::vector<Image, ImageAllocator> swapchainImages;
+ uint32_t swapchainImageCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && swapchainImageCount )
+ {
+ swapchainImages.resize( swapchainImageCount );
+ result =
+ static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &swapchainImageCount,
+ reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
+ {
+ swapchainImages.resize( swapchainImageCount );
+ }
+ return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
- Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename ImageAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+ Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ ImageAllocator & imageAllocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>(
- d.vkCreatePipelineCache( m_device,
- reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>(
- result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
+ std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
+ uint32_t swapchainImageCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && swapchainImageCount )
+ {
+ swapchainImages.resize( swapchainImageCount );
+ result =
+ static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &swapchainImageCount,
+ reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
+ {
+ swapchainImages.resize( swapchainImageCount );
+ }
+ return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
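
Retrieving the images afterwards is a single call; the images are owned by the swapchain and must not be destroyed by the application:

std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );
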
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ uint32_t * pImageIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreatePipelineLayout( m_device,
- reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
+ return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ timeout,
+ static_cast<VkSemaphore>( semaphore ),
+ static_cast<VkFence>( fence ),
+ pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
- Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>(
- d.vkCreatePipelineLayout( m_device,
- reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
- return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
- Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
+ Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>(
- d.vkCreatePipelineLayout( m_device,
- reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>(
- result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
+ uint32_t imageIndex;
+ Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ timeout,
+ static_cast<VkSemaphore>( semaphore ),
+ static_cast<VkFence>( fence ),
+ &imageIndex ) );
+ return createResultValue( result,
+ imageIndex,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
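
Because eTimeout, eNotReady and eSuboptimalKHR are treated as success codes, the enhanced overload above returns vk::ResultValue<uint32_t> rather than throwing on them; only genuine error codes throw. A per-frame sketch, with imageAvailableSemaphore an assumed valid vk::Semaphore:

vk::ResultValue<uint32_t> acquire =
  device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, vk::Fence() );
if ( acquire.result == vk::Result::eSuboptimalKHR )
{
  // still presentable, but recreating the swapchain soon is advisable
}
uint32_t imageIndex = acquire.value;
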
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
+ const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
+ d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type
- Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
- Result result = static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+ Result result =
+ static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
return createResultValue(
- result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
- Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
- Result result = static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>(
- result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
+ result,
+ VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
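
Similarly, presentKHR keeps returning vk::Result even in enhanced mode because eSuboptimalKHR must reach the caller. A sketch, with presentQueue, renderFinishedSemaphore, swapchain and imageIndex assumed from the surrounding frame loop:

vk::PresentInfoKHR presentInfo;
presentInfo.waitSemaphoreCount = 1;
presentInfo.pWaitSemaphores    = &renderFinishedSemaphore;
presentInfo.swapchainCount     = 1;
presentInfo.pSwapchains        = &swapchain;
presentInfo.pImageIndices      = &imageIndex;

vk::Result presentResult = presentQueue.presentKHR( presentInfo );
if ( presentResult == vk::Result::eSuboptimalKHR )
{
  // recreate the swapchain before the next frame
}
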
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateQueryPool( m_device,
- reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
+ return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
+ m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
- Device::createQueryPool( const QueryPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::QueryPool queryPool;
- Result result = static_cast<Result>(
- d.vkCreateQueryPool( m_device,
- reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
- return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
- Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+ Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::QueryPool queryPool;
- Result result = static_cast<Result>(
- d.vkCreateQueryPool( m_device,
- reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>(
- result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+ Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
+ m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
+ return createResultValue(
+ result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue( result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <typename PipelineAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue( result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+ Result result = static_cast<Result>(
+ d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+ return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
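[Editor's note — illustrative sketch, not part of the header: the enhanced wrapper above returns the flags by value; `device` and `surface` are placeholder handles created by the application.]

// Which device-group present modes are usable with this surface?
vk::DeviceGroupPresentModeFlagsKHR modes = device.getGroupSurfacePresentModesKHR( surface );
if ( modes & vk::DeviceGroupPresentModeFlagBitsKHR::eRemote )
{
  // remote presentation is supported for this surface
}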
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
- Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pRectCount,
+ VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue( result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Rect2DAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<Rect2D, Rect2DAllocator> rects;
+ uint32_t rectCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && rectCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ rects.resize( rectCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &rectCount,
+ reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
+ {
+ rects.resize( rectCount );
}
- return createResultValue( result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
}
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ template <typename Rect2DAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Rect2DAllocator & rect2DAllocator,
+ Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
+ uint32_t rectCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && rectCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ rects.resize( rectCount );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ &rectCount,
+ reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
+ {
+ rects.resize( rectCount );
}
- return createResultValue( result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
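[Editor's note — illustrative sketch, not part of the header: the enhanced overload above wraps the count/data enumeration shown in the do/while loop, so application code only sees a vector; `physicalDevice` and `surface` are placeholders.]

// The wrapper re-queries on eIncomplete and trims the vector to the final count.
std::vector<vk::Rect2D> rects = physicalDevice.getPresentRectanglesKHR( surface );
for ( vk::Rect2D const & r : rects )
{
  // r.offset / r.extent describe a rectangle of the surface that can be presented to
}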
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+ uint32_t * pImageIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>( result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING
- "::Device::createRayTracingPipelineKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+ return static_cast<Result>( d.vkAcquireNextImage2KHR(
+ m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
+ Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
+ {
+ uint32_t imageIndex;
+ Result result = static_cast<Result>( d.vkAcquireNextImage2KHR(
+ m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
+ return createResultValue( result,
+ imageIndex,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
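[Editor's note — illustrative sketch, not part of the header: because acquireNextImage2KHR has several success codes (eTimeout, eNotReady, eSuboptimalKHR), the enhanced wrapper above returns ResultValue<uint32_t> rather than throwing on those codes; `swapchain` and `imageAvailableSemaphore` are placeholder handles.]

vk::AcquireNextImageInfoKHR acquireInfo;
acquireInfo.swapchain  = swapchain;
acquireInfo.timeout    = UINT64_MAX;
acquireInfo.semaphore  = imageAvailableSemaphore;
acquireInfo.deviceMask = 1;   // single-GPU device group

vk::ResultValue<uint32_t> rv = device.acquireNextImage2KHR( acquireInfo );
if ( ( rv.result == vk::Result::eSuccess ) || ( rv.result == vk::Result::eSuboptimalKHR ) )
{
  uint32_t imageIndex = rv.value;
  // record, submit and present using imageIndex
}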
+ //=== VK_KHR_display ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
{
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
}
- template <typename PipelineAllocator,
+ template <typename DisplayPropertiesKHRAllocator,
typename Dispatch,
typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
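[Editor's note — illustrative sketch, not part of the header: enumerating displays with the VK_KHR_display wrapper added above; `physicalDevice` is a placeholder handle.]

// Enumerate the displays attached to this physical device.
std::vector<vk::DisplayPropertiesKHR> displayProps = physicalDevice.getDisplayPropertiesKHR();
for ( vk::DisplayPropertiesKHR const & dp : displayProps )
{
  vk::DisplayKHR display = dp.display;
  // dp.displayName and dp.physicalResolution describe the display
}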
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
- Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
}
return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
}
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ template <typename DisplayPlanePropertiesKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlanePropertiesKHR(
+ DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
{
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties(
+ displayPlanePropertiesKHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
{
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
}
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
}
return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ uint32_t * pDisplayCount,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateRenderPass( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+ return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+ m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass( const RenderPassCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename DisplayKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+ std::vector<DisplayKHR, DisplayKHRAllocator> displays;
+ uint32_t displayCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && displayCount )
+ {
+ displays.resize( displayCount );
+ result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+ m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+ VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
+ {
+ displays.resize( displayCount );
+ }
+ return createResultValue(
+ result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename DisplayKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ DisplayKHRAllocator & displayKHRAllocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
+ std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
+ uint32_t displayCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && displayCount )
+ {
+ displays.resize( displayCount );
+ result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+ m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+ VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
+ {
+ displays.resize( displayCount );
+ }
+ return createResultValue(
+ result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
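[Editor's note — illustrative sketch, not part of the header: combining the plane-property and supported-display wrappers shown above; `physicalDevice` is a placeholder handle.]

// For each display plane, list the displays it can be shown on.
uint32_t planeCount =
  static_cast<uint32_t>( physicalDevice.getDisplayPlanePropertiesKHR().size() );
for ( uint32_t plane = 0; plane < planeCount; ++plane )
{
  std::vector<vk::DisplayKHR> supported =
    physicalDevice.getDisplayPlaneSupportedDisplaysKHR( plane );
  // 'supported' lists the displays usable with this plane
}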
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateRenderPass2( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+ return static_cast<Result>(
+ d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ pPropertyCount,
+ reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass2( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+ std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <typename DisplayModePropertiesKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass2( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
+ std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties(
+ displayModePropertiesKHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateRenderPass2KHR( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
+ d.vkCreateDisplayModeKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+ reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
+ PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayModeCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass2KHR( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+ Result result = static_cast<Result>(
+ d.vkCreateDisplayModeKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
+ reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
+ return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
+ PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayModeCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
- d.vkCreateRenderPass2KHR( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+ Result result = static_cast<Result>(
+ d.vkCreateDisplayModeKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
+ reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
+ ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>(
+ result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
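[Editor's note — illustrative sketch, not part of the header: creating a display mode with the wrapper above; `physicalDevice` and `display` are placeholders, and the 1920x1080 / 60 Hz values are arbitrary example parameters.]

vk::DisplayModeParametersKHR params;
params.visibleRegion = vk::Extent2D{ 1920, 1080 };
params.refreshRate   = 60000;   // in mHz, i.e. 60 Hz

vk::DisplayModeCreateInfoKHR modeCreateInfo;
modeCreateInfo.parameters = params;

// With exceptions enabled the wrapper returns the handle directly and throws on failure;
// createDisplayModeKHRUnique returns a UniqueHandle instead.
vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, modeCreateInfo );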
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Sampler * pSampler,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+ uint32_t planeIndex,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateSampler( m_device,
- reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSampler *>( pSampler ) ) );
+ return static_cast<Result>(
+ d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
+ static_cast<VkDisplayModeKHR>( mode ),
+ planeIndex,
+ reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
- Device::createSampler( const SamplerCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::Sampler sampler;
- Result result = static_cast<Result>(
- d.vkCreateSampler( m_device,
- reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSampler *>( &sampler ) ) );
- return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
- Device::createSamplerUnique( const SamplerCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
+ PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+ uint32_t planeIndex,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Sampler sampler;
- Result result = static_cast<Result>(
- d.vkCreateSampler( m_device,
- reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSampler *>( &sampler ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>(
- result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+ Result result = static_cast<Result>(
+ d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
+ static_cast<VkDisplayModeKHR>( mode ),
+ planeIndex,
+ reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
+ return createResultValue(
+ result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+ d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- return createResultValue(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateSamplerYcbcrConversionKHR( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
- m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- return createResultValue(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
- m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkCreateSemaphore( m_device,
- reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
- Device::createSemaphore( const SemaphoreCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::Semaphore semaphore;
- Result result = static_cast<Result>(
- d.vkCreateSemaphore( m_device,
- reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
- return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
- Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::Semaphore semaphore;
- Result result = static_cast<Result>(
- d.vkCreateSemaphore( m_device,
- reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>(
- result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
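// A hypothetical sketch (not part of vulkan.hpp) of the display-plane surface creator above; it
// assumes `instance` is a valid vk::Instance and `createInfo` is a vk::DisplaySurfaceCreateInfoKHR
// already filled in from a display mode queried via VK_KHR_display.
inline vk::UniqueSurfaceKHR makeDisplayPlaneSurface( vk::Instance                            instance,
                                                     vk::DisplaySurfaceCreateInfoKHR const & createInfo )
{
  // The ObjectDestroy<Instance> deleter destroys the surface when the unique handle is released.
  return instance.createDisplayPlaneSurfaceKHRUnique( createInfo );
}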
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateShaderModule( m_device,
- reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
- Device::createShaderModule( const ShaderModuleCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
- Result result = static_cast<Result>(
- d.vkCreateShaderModule( m_device,
- reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
- return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
- Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
- Result result = static_cast<Result>(
- d.vkCreateShaderModule( m_device,
- reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>(
- result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_display_swapchain ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
@@ -108270,114 +109979,726 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateSwapchainKHR( m_device,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
+ d.vkCreateXlibSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
- Result result = static_cast<Result>(
- d.vkCreateSwapchainKHR( m_device,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateXlibSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
- Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
- Result result = static_cast<Result>(
- d.vkCreateSwapchainKHR( m_device,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>(
- result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateXlibSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+ Display * dpy,
+ VisualID visualID,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+ Display & dpy,
+ VisualID visualID,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
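// A hypothetical helper (not part of vulkan.hpp) sketching how the VK_KHR_xlib_surface wrappers
// above might be used; it assumes a valid vk::Instance `instance`, vk::PhysicalDevice
// `physicalDevice`, an X11 `Display * dpy`, a `Window window`, a `VisualID visualID`, and a
// queue family index `queueFamilyIndex`.
inline vk::UniqueSurfaceKHR makeXlibSurface( vk::Instance       instance,
                                             vk::PhysicalDevice physicalDevice,
                                             Display *          dpy,
                                             Window             window,
                                             VisualID           visualID,
                                             uint32_t           queueFamilyIndex )
{
  // Check presentation support before pairing this queue family with the surface.
  if ( !physicalDevice.getXlibPresentationSupportKHR( queueFamilyIndex, *dpy, visualID ) )
    return vk::UniqueSurfaceKHR{};
  vk::XlibSurfaceCreateInfoKHR createInfo( {}, dpy, window );
  // The Unique variant attaches an ObjectDestroy<Instance> deleter, so the surface is destroyed
  // automatically when the handle goes out of scope.
  return instance.createXlibSurfaceKHRUnique( createInfo );
}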
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
+ d.vkCreateXcbSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateXcbSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateXcbSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t * connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t & connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkCreateWaylandSurfaceKHR( m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateWaylandSurfaceKHR( m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateWaylandSurfaceKHR( m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
+ uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
+ uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_KHR_android_surface ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkCreateAndroidSurfaceKHR( m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateAndroidSurfaceKHR( m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateAndroidSurfaceKHR( m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_win32_surface ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkCreateWin32SurfaceKHR( m_instance,
+ reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateWin32SurfaceKHR( m_instance,
+ reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateWin32SurfaceKHR( m_instance,
+ reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR(
+ uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
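// A hypothetical helper (not part of vulkan.hpp) sketching the VK_KHR_win32_surface wrappers above;
// it assumes a valid vk::Instance `instance`, an HINSTANCE `hinstance`, and an HWND `hwnd`.
inline vk::UniqueSurfaceKHR makeWin32Surface( vk::Instance instance, HINSTANCE hinstance, HWND hwnd )
{
  vk::Win32SurfaceCreateInfoKHR createInfo( {}, hinstance, hwnd );
  // Ownership of the VkSurfaceKHR is handed to the returned UniqueSurfaceKHR.
  return instance.createWin32SurfaceKHRUnique( createInfo );
}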
+
+ //=== VK_EXT_debug_report ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkCreateDebugReportCallbackEXT( m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
- Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
+ Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
- Result result = static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+ Result result = static_cast<Result>(
+ d.vkCreateDebugReportCallbackEXT( m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
return createResultValue(
- result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
+ result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
- Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
+ Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
- Result result = static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>(
- result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+ Result result = static_cast<Result>(
+ d.vkCreateDebugReportCallbackEXT( m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>(
+ result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance,
+ static_cast<VkDebugReportCallbackEXT>( callback ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDebugReportCallbackEXT(
+ m_instance,
+ static_cast<VkDebugReportCallbackEXT>( callback ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDebugReportCallbackEXT( m_instance,
+ static_cast<VkDebugReportCallbackEXT>( callback ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyDebugReportCallbackEXT(
+ m_instance,
+ static_cast<VkDebugReportCallbackEXT>( callback ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+ uint64_t object,
+ size_t location,
+ int32_t messageCode,
+ const char * pLayerPrefix,
+ const char * pMessage,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDebugReportMessageEXT( m_instance,
+ static_cast<VkDebugReportFlagsEXT>( flags ),
+ static_cast<VkDebugReportObjectTypeEXT>( objectType ),
+ object,
+ location,
+ messageCode,
+ pLayerPrefix,
+ pMessage );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+ uint64_t object,
+ size_t location,
+ int32_t messageCode,
+ const std::string & layerPrefix,
+ const std::string & message,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDebugReportMessageEXT( m_instance,
+ static_cast<VkDebugReportFlagsEXT>( flags ),
+ static_cast<VkDebugReportObjectTypeEXT>( objectType ),
+ object,
+ location,
+ messageCode,
+ layerPrefix.c_str(),
+ message.c_str() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
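// A hypothetical sketch (not part of vulkan.hpp) of registering a VK_EXT_debug_report callback with
// the wrappers above; `instance` is assumed to be a vk::Instance created with the extension enabled,
// and `debugReportCallback` a user-supplied PFN_vkDebugReportCallbackEXT.
inline vk::UniqueDebugReportCallbackEXT registerDebugReport( vk::Instance                 instance,
                                                             PFN_vkDebugReportCallbackEXT debugReportCallback )
{
  vk::DebugReportCallbackCreateInfoEXT createInfo(
    vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, debugReportCallback );
  // The unique handle calls vkDestroyDebugReportCallbackEXT when it is destroyed.
  return instance.createDebugReportCallbackEXTUnique( createInfo );
}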
+
+ //=== VK_EXT_debug_marker ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT(
+ const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>(
+ d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT(
+ const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
+ m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
+ m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
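// A hypothetical sketch (not part of vulkan.hpp) of the VK_EXT_debug_marker command-buffer wrappers
// above; `commandBuffer` is assumed to be a vk::CommandBuffer in the recording state on a device
// with the extension enabled.
inline void markRegion( vk::CommandBuffer commandBuffer )
{
  vk::DebugMarkerMarkerInfoEXT markerInfo( "frame setup" );
  commandBuffer.debugMarkerBeginEXT( markerInfo );
  // ... record the commands belonging to this marker region ...
  commandBuffer.debugMarkerEndEXT();
}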
+
#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
+ PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
+ Result result = static_cast<Result>(
+ d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
+ return createResultValue(
+ result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
+ {
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+ Result result = static_cast<Result>(
+ d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
+ return createResultValue(
+ result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
+ uint32_t * pVideoFormatPropertyCount,
+ VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
+ pVideoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+ Dispatch const & d ) const
+ {
+ std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
+ uint32_t videoFormatPropertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+ {
+ videoFormatProperties.resize( videoFormatPropertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
+ {
+ videoFormatProperties.resize( videoFormatPropertyCount );
+ }
+ return createResultValue(
+ result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ }
+
+ template <typename VideoFormatPropertiesKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+ VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties(
+ videoFormatPropertiesKHRAllocator );
+ uint32_t videoFormatPropertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+ {
+ videoFormatProperties.resize( videoFormatPropertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
+ {
+ videoFormatProperties.resize( videoFormatPropertyCount );
+ }
+ return createResultValue(
+ result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
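// A hypothetical sketch (not part of vulkan.hpp) of the beta VK_KHR_video_queue queries above; it
// assumes VK_ENABLE_BETA_EXTENSIONS is defined, `physicalDevice` is a valid vk::PhysicalDevice, and
// `profile` / `formatInfo` are caller-filled vk::VideoProfileKHR / vk::PhysicalDeviceVideoFormatInfoKHR
// structures describing the codec of interest.
inline void queryVideoSupport( vk::PhysicalDevice                           physicalDevice,
                               vk::VideoProfileKHR const &                  profile,
                               vk::PhysicalDeviceVideoFormatInfoKHR const & formatInfo )
{
  // Throws vk::SystemError on failure when exceptions are enabled.
  vk::VideoCapabilitiesKHR capabilities = physicalDevice.getVideoCapabilitiesKHR( profile );
  // The enhanced wrapper performs the usual two-call enumeration loop internally.
  std::vector<vk::VideoFormatPropertiesKHR> formats = physicalDevice.getVideoFormatPropertiesKHR( formatInfo );
  (void)capabilities;
  (void)formats;
}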
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
@@ -108431,9 +110752,175 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyVideoSessionKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyVideoSessionKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyVideoSessionKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroyVideoSessionKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t * pVideoSessionMemoryRequirementsCount,
+ VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ pVideoSessionMemoryRequirementsCount,
+ reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+ Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Dispatch const & d ) const
+ {
+ std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
+ uint32_t videoSessionMemoryRequirementsCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+ m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+ {
+ videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+ result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ &videoSessionMemoryRequirementsCount,
+ reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+ VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) &&
+ ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
+ {
+ videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+ }
+ return createResultValue( result,
+ videoSessionMemoryRequirements,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ }
+
+ template <
+ typename VideoGetMemoryPropertiesKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+ Device::getVideoSessionMemoryRequirementsKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
+ videoGetMemoryPropertiesKHRAllocator );
+ uint32_t videoSessionMemoryRequirementsCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+ m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+ {
+ videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+ result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ &videoSessionMemoryRequirementsCount,
+ reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+ VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) &&
+ ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
+ {
+ videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+ }
+ return createResultValue( result,
+ videoSessionMemoryRequirements,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t videoSessionBindMemoryCount,
+ const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>(
+ d.vkBindVideoSessionMemoryKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ videoSessionBindMemoryCount,
+ reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindVideoSessionMemoryKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
+ Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ videoSessionBindMemories.size(),
+ reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
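// A hypothetical sketch (not part of vulkan.hpp) of the video-session memory wrappers above; it
// assumes `device` is a valid vk::Device and `session` a vk::VideoSessionKHR created on it, and it
// only queries the requirements -- allocating and binding memory per requirement is left to the caller.
inline std::vector<vk::VideoGetMemoryPropertiesKHR> queryVideoSessionMemory( vk::Device          device,
                                                                             vk::VideoSessionKHR session )
{
  // The wrapper loops on VK_INCOMPLETE internally and resizes the vector to the final count.
  return device.getVideoSessionMemoryRequirementsKHR( session );
}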
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR(
const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
@@ -108491,350 +110978,441 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+ return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
+ m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+ Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VideoSessionParametersUpdateInfoKHR & updateInfo,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+ Result result = static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
+ m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+ d.vkDestroyVideoSessionParametersKHR( m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+ d.vkDestroyVideoSessionParametersKHR(
+ m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ d.vkDestroyVideoSessionParametersKHR( m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-#else
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
- VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+ d.vkDestroyVideoSessionParametersKHR(
+ m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureKHR( m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureKHR(
- m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureKHR( m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureKHR(
- m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdControlVideoCodingKHR( m_commandBuffer,
+ reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV(
- m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdControlVideoCodingKHR( m_commandBuffer,
+ reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
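
  // Illustrative usage sketch (not part of the generated header): with
  // VK_ENABLE_BETA_EXTENSIONS defined, the enhanced-mode overloads above scope a
  // video coding session on a command buffer. `cmd`, `beginInfo`, `controlInfo`
  // and `endInfo` are hypothetical, already-populated objects (default `vk`
  // namespace assumed).
  //
  //   cmd.beginVideoCodingKHR( beginInfo );       // vk::VideoBeginCodingInfoKHR
  //   cmd.controlVideoCodingKHR( controlInfo );   // vk::VideoCodingControlInfoKHR
  //   cmd.endVideoCodingKHR( endInfo );           // vk::VideoEndCodingInfoKHR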
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_decode_queue ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyAccelerationStructureNV(
- m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_EXT_transform_feedback ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyBuffer(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+ firstBinding,
+ bindingCount,
+ reinterpret_cast<const VkBuffer *>( pBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+ reinterpret_cast<const VkDeviceSize *>( pSizes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- d.vkDestroyBuffer( m_device,
- static_cast<VkBuffer>( buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+ VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+# else
+ if ( buffers.size() != offsets.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+ }
+ if ( !sizes.empty() && buffers.size() != sizes.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+ firstBinding,
+ buffers.size(),
+ reinterpret_cast<const VkBuffer *>( buffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+ reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyBuffer(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+ firstCounterBuffer,
+ counterBufferCount,
+ reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
+ uint32_t firstCounterBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- d.vkDestroyBuffer( m_device,
- static_cast<VkBuffer>( buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+ if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+ firstCounterBuffer,
+ counterBuffers.size(),
+ reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+ firstCounterBuffer,
+ counterBufferCount,
+ reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
+ uint32_t firstCounterBuffer,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+ if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+ firstCounterBuffer,
+ counterBuffers.size(),
+ reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
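
  // Illustrative usage sketch (not part of the generated header): the ArrayProxy
  // overloads above require matching span sizes; a mismatch throws vk::LogicError
  // (or asserts when VULKAN_HPP_NO_EXCEPTIONS is defined). `cmd` and `xfbBuffer`
  // are hypothetical handles.
  //
  //   std::array<vk::Buffer, 1>     buffers{ xfbBuffer };
  //   std::array<vk::DeviceSize, 1> offsets{ 0 };
  //   cmd.bindTransformFeedbackBuffersEXT( 0, buffers, offsets, nullptr );  // no explicit sizes
  //   cmd.beginTransformFeedbackEXT( 0, nullptr, nullptr );                 // no counter buffers
  //   // ... transform-feedback draws ...
  //   cmd.endTransformFeedbackEXT( 0, nullptr, nullptr );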
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+ uint32_t index,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdBeginQueryIndexedEXT(
+ m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ uint32_t index,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
+ uint32_t firstInstance,
+ VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+ uint32_t counterOffset,
+ uint32_t vertexStride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
+ instanceCount,
+ firstInstance,
+ static_cast<VkBuffer>( counterBuffer ),
+ static_cast<VkDeviceSize>( counterBufferOffset ),
+ counterOffset,
+ vertexStride );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NVX_binary_import ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
+ reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+ Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+ Result result = static_cast<Result>(
+ d.vkCreateCuModuleNVX( m_device,
+ reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
+ return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
+ Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+ Result result = static_cast<Result>(
+ d.vkCreateCuModuleNVX( m_device,
+ reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>(
+ result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>(
+ d.vkCreateCuFunctionNVX( m_device,
+ reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
+ Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+ Result result = static_cast<Result>(
+ d.vkCreateCuFunctionNVX( m_device,
+ reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
+ return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
+ Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+ Result result = static_cast<Result>(
+ d.vkCreateCuFunctionNVX( m_device,
+ reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>(
+ result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
@@ -108881,1208 +111459,1546 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX( m_device,
+ static_cast<VkCuFunctionNVX>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkDestroyCuFunctionNVX( m_device,
+ static_cast<VkCuFunctionNVX>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX( m_device,
+ static_cast<VkCuFunctionNVX>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkDestroyCuFunctionNVX( m_device,
+ static_cast<VkCuFunctionNVX>( function ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
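
  // Illustrative usage sketch (not part of the generated header): the enhanced-mode
  // overloads above return the created handles directly and throw on failure.
  // `device`, `cmd`, `moduleCreateInfo`, `functionCreateInfo` and `launchInfo` are
  // hypothetical, already-populated objects.
  //
  //   vk::CuModuleNVX   cuModule   = device.createCuModuleNVX( moduleCreateInfo );     // vk::CuModuleCreateInfoNVX
  //   vk::CuFunctionNVX cuFunction = device.createCuFunctionNVX( functionCreateInfo ); // vk::CuFunctionCreateInfoNVX
  //   cmd.cuLaunchKernelNVX( launchInfo );  // vk::CuLaunchInfoNVX referencing cuFunction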
+ //=== VK_NVX_image_view_handle ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkGetImageViewAddressNVX( m_device,
+ static_cast<VkImageView>( imageView ),
+ reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
+ Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
{
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+ Result result = static_cast<Result>(
+ d.vkGetImageViewAddressNVX( m_device,
+ static_cast<VkImageView>( imageView ),
+ reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
+ return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
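
  // Illustrative usage sketch (not part of the generated header): `device`,
  // `imageView` and `handleInfo` are hypothetical; the enhanced-mode overload above
  // returns the address properties by value and throws on failure.
  //
  //   uint32_t viewHandle = device.getImageViewHandleNVX( handleInfo );  // vk::ImageViewHandleInfoNVX
  //   vk::ImageViewAddressPropertiesNVX props = device.getImageViewAddressNVX( imageView );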
+ //=== VK_AMD_draw_indirect_count ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_AMD_shader_info ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ size_t * pInfoSize,
+ void * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplate( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ pInfoSize,
+ pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Uint8_tAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ Dispatch const & d ) const
{
- d.vkDestroyDescriptorUpdateTemplate(
- m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<uint8_t, Uint8_tAllocator> info;
+ size_t infoSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && infoSize )
+ {
+ info.resize( infoSize );
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ reinterpret_cast<void *>( info.data() ) ) );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
+ {
+ info.resize( infoSize );
+ }
+ return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Uint8_tAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+ Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ Uint8_tAllocator & uint8_tAllocator,
+ Dispatch const & d ) const
{
- d.vkDestroyDescriptorUpdateTemplate( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
+ size_t infoSize;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && infoSize )
+ {
+ info.resize( infoSize );
+ result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ reinterpret_cast<void *>( info.data() ) ) );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
+ {
+ info.resize( infoSize );
+ }
+ return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
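
  // Illustrative usage sketch (not part of the generated header): the enhanced-mode
  // overload above retries on vk::Result::eIncomplete and returns the full blob.
  // `device` and `pipeline` are hypothetical handles.
  //
  //   std::vector<uint8_t> disassembly = device.getShaderInfoAMD(
  //     pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );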
+
+#if defined( VK_USE_PLATFORM_GGP )
+ //=== VK_GGP_stream_descriptor_surface ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP(
+ const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDescriptorUpdateTemplate(
- m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
+ m_instance,
+ reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyDescriptorUpdateTemplateKHR( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
+ m_instance,
+ reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyDescriptorUpdateTemplateKHR(
- m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
+ m_instance,
+ reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+ //=== VK_NV_external_memory_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV(
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+ reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
+ PhysicalDevice::getExternalImageFormatPropertiesNV(
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ Dispatch const & d ) const
{
- d.vkDestroyDevice( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+ m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+ reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
+ return createResultValue( result,
+ externalImageFormatProperties,
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyEvent(
- m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
- }
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_external_memory_win32 ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyEvent( m_device,
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>(
+ d.vkGetMemoryWin32HandleNV( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
+ pHandle ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ Dispatch const & d ) const
{
- d.vkDestroyEvent(
- m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ HANDLE handle;
+ Result result =
+ static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
+ &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_get_physical_device_properties2 ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyEvent( m_device,
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
+ PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFence(
- m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+ return features;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFence( m_device,
- static_cast<VkFence>( fence ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
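
  // Illustrative usage sketch (not part of the generated header): the templated
  // overload above fills a StructureChain so core and extension feature structs are
  // queried in one call. `physicalDevice` is a hypothetical vk::PhysicalDevice.
  //
  //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDevice16BitStorageFeatures>();
  //   bool storage16 = chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;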
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFence(
- m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+ PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFence( m_device,
- static_cast<VkFence>( fence ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ return properties;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ return structureChain;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkGetPhysicalDeviceFormatProperties2KHR(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+ d.vkGetPhysicalDeviceFormatProperties2KHR(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ return formatProperties;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ d.vkGetPhysicalDeviceFormatProperties2KHR(
+ m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyImage(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d ) const
{
- d.vkDestroyImage( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+ return createResultValue(
+ result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
+ Dispatch const & d ) const
{
- d.vkDestroyImage(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+ return createResultValue(
+ result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyImage( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+ m_physicalDevice,
+ pQueueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
- d.vkDestroyImageView(
- m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename QueueFamilyProperties2Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyImageView( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
+ queueFamilyProperties2Allocator );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ return queueFamilyProperties;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+ PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
- d.vkDestroyImageView(
- m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ queueFamilyProperties[i].pNext =
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ }
+ return returnVector;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename StructureChain,
+ typename StructureChainAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+ PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
+ Dispatch const & d ) const
{
- d.vkDestroyImageView( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ uint32_t queueFamilyPropertyCount;
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+ std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
+ structureChainAllocator );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ queueFamilyProperties[i].pNext =
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+ m_physicalDevice,
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+ {
+ returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ }
+ return returnVector;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNV( m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkGetPhysicalDeviceMemoryProperties2KHR(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+ PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNV(
- m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+ d.vkGetPhysicalDeviceMemoryProperties2KHR(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ return memoryProperties;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNV( m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+ d.vkGetPhysicalDeviceMemoryProperties2KHR(
+ m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ return structureChain;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyIndirectCommandsLayoutNV(
- m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d ) const
{
- d.vkDestroyPipeline(
- m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <
+ typename SparseImageFormatProperties2Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2KHR(
+ const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyPipeline( m_device,
- static_cast<VkPipeline>( pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
+ sparseImageFormatProperties2Allocator );
+ uint32_t propertyCount;
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_device_group ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipeline(
- m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipeline( m_device,
- static_cast<VkPipeline>( pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
+ uint32_t baseGroupY,
+ uint32_t baseGroupZ,
+ uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdDispatchBaseKHR(
+ m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
- }
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===

-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
+ reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateViSurfaceNN( m_instance,
+ reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateViSurfaceNN( m_instance,
+ reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+ //=== VK_KHR_maintenance1 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkTrimCommandPoolKHR(
+ m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_KHR_device_group_creation ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR(
+ uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
+ m_instance,
+ pPhysicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
{
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
+ m_instance,
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ }
+ return createResultValue( result,
+ physicalDeviceGroupProperties,
+ VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <
+ typename PhysicalDeviceGroupPropertiesAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroupsKHR(
+ PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+ physicalDeviceGroupPropertiesAllocator );
+ uint32_t physicalDeviceGroupCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
+ m_instance,
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ {
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ }
+ return createResultValue( result,
+ physicalDeviceGroupProperties,
+ VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_external_memory_capabilities ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ return externalBufferProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_memory_win32 ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyQueryPool(
- m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
+ m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
- d.vkDestroyQueryPool( m_device,
- static_cast<VkQueryPool>( queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
+ m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyQueryPool(
- m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ handle,
+ reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
+ Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ Dispatch const & d ) const
{
- d.vkDestroyQueryPool( m_device,
- static_cast<VkQueryPool>( queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ handle,
+ reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
+ return createResultValue(
+ result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_external_memory_fd ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
+ Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
{
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ int fd;
+ Result result = static_cast<Result>(
+ d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkGetMemoryFdPropertiesKHR( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ fd,
+ reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
+ Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ Dispatch const & d ) const
{
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+ Result result = static_cast<Result>(
+ d.vkGetMemoryFdPropertiesKHR( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ fd,
+ reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
+ return createResultValue(
+ result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_external_semaphore_capabilities ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySampler(
- m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ PhysicalDevice::getExternalSemaphorePropertiesKHR(
+ const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySampler( m_device,
- static_cast<VkSampler>( sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+ return externalSemaphoreProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_semaphore_win32 ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySampler(
- m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
+ m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
+ Dispatch const & d ) const
{
- d.vkDestroySampler( m_device,
- static_cast<VkSampler>( sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
+ m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversion( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
+ m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
+ Dispatch const & d ) const
{
- d.vkDestroySamplerYcbcrConversion(
- m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
+ m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_external_semaphore_fd ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversion( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkImportSemaphoreFdKHR(
+ m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
{
- d.vkDestroySamplerYcbcrConversion(
- m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR(
+ m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySamplerYcbcrConversionKHR( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
+ Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
{
- d.vkDestroySamplerYcbcrConversionKHR(
- m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ int fd;
+ Result result = static_cast<Result>(
+ d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_push_descriptor ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySemaphore(
- m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ descriptorWriteCount,
+ reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySemaphore( m_device,
- static_cast<VkSemaphore>( semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ descriptorWrites.size(),
+ reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
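As an illustrative sketch (assuming a command buffer in the recording state, a `pipelineLayout` whose set 0 uses a push-descriptor set layout, and a filled-in vk::DescriptorImageInfo `imageInfo`), the ArrayProxy overload above accepts a single write directly:

  vk::WriteDescriptorSet write( {}, 0 /*dstBinding*/, 0 /*dstArrayElement*/, 1 /*descriptorCount*/,
                                vk::DescriptorType::eCombinedImageSampler, &imageInfo );
  commandBuffer.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0 /*set*/, write );
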
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySemaphore(
- m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ pData );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroySemaphore( m_device,
- static_cast<VkSemaphore>( semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_conditional_rendering ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
+ const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdBeginConditionalRenderingEXT(
+ m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdBeginConditionalRenderingEXT(
+ m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_KHR_descriptor_update_template ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR(
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
+ m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
+ m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+ return createResultValue(
+ result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
+ m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
+ result,
+ descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique",
+ deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
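A sketch of the Unique variant above (assumes a filled-in vk::DescriptorUpdateTemplateCreateInfo `createInfo`, a vk::DescriptorSet `descriptorSet`, and application data `data` laid out to match the template); it pairs with the update call added further below:

  vk::UniqueDescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplateKHRUnique( createInfo );
  device.updateDescriptorSetWithTemplateKHR( descriptorSet, *updateTemplate, &data );
  // the template is destroyed automatically when updateTemplate goes out of scope
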
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR( m_device,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR(
+ m_device,
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
+ static_cast<VkDescriptorSet>( descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ pData );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_clip_space_w_scaling ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdSetViewportWScalingNV( m_commandBuffer,
+ firstViewport,
+ viewportCount,
+ reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
+ uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdSetViewportWScalingNV( m_commandBuffer,
+ firstViewport,
+ viewportWScalings.size(),
+ reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_direct_mode_display ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
}
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ Result result =
+ static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+ //=== VK_EXT_acquire_xlib_display ===
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT(
+ Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ PhysicalDevice::acquireXlibDisplayEXT( Display & dpy,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d ) const
{
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ Result result =
+ static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
+ RROutput rrOutput,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyVideoSessionParametersKHR( m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>(
+ d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
{
- d.vkDestroyVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ Result result = static_cast<Result>(
+ d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+ return createResultValue(
+ result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
{
- d.vkDestroyVideoSessionParametersKHR( m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ Result result = static_cast<Result>(
+ d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+ ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
+ result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ //=== VK_EXT_display_surface_counter ===
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+ m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::waitIdle( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
+ PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+ m_physicalDevice,
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
+ return createResultValue(
+ result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
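A short sketch of the enhanced-mode overload above (illustrative only; assumes a vk::PhysicalDevice `physicalDevice`, a vk::SurfaceKHR `surface`, and the VK_EXT_display_surface_counter instance extension enabled):

  vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
  // check whether the vblank counter queried by getSwapchainCounterEXT is available
  bool hasVblankCounter =
    static_cast<bool>( caps.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank );
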
+ //=== VK_EXT_display_control ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
@@ -110110,1272 +113026,1720 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkFlushMappedMemoryRanges(
- m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ return static_cast<Result>(
+ d.vkRegisterDeviceEventEXT( m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges(
- m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkRegisterDeviceEventEXT( m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBufferCount,
- reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkRegisterDeviceEventEXT( m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
+ result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
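As a usage sketch (assumes enhanced mode with exceptions, a vk::Device `device`, and the VK_EXT_display_control extension enabled), registering a device event yields a fence that signals when the event occurs:

  vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
  vk::UniqueFence hotplugFence = device.registerEventEXTUnique( eventInfo );  // freed automatically
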
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBuffers.size(),
- reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+ return static_cast<Result>(
+ d.vkRegisterDisplayEventEXT( m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFence *>( pFence ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayEventInfoEXT & displayEventInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBufferCount,
- reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkRegisterDisplayEventEXT( m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const DisplayEventInfoEXT & displayEventInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBuffers.size(),
- reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>(
+ d.vkRegisterDisplayEventEXT( m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
+ result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ uint64_t * pCounterValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSetCount,
- reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
+ pCounterValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+ Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSets.size(),
- reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
+ uint64_t counterValue;
+ Result result =
+ static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
+ &counterValue ) );
+ return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
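A minimal sketch for the enhanced-mode overload above (assumes `device` and `swapchain` are valid and that the surface reported eVblank in supportedSurfaceCounters):

  uint64_t vblankCount = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );
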
+ //=== VK_GOOGLE_display_timing ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSetCount,
- reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
+ m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
+ Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSets.size(),
- reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
+ VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+ Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
+ m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
+ return createResultValue(
+ result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pPresentationTimingCount,
+ VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkFreeMemory(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
+ m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ pPresentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- d.vkFreeMemory( m_device,
- static_cast<VkDeviceMemory>( memory ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
+ uint32_t presentationTimingCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentationTimingCount )
+ {
+ presentationTimings.resize( presentationTimingCount );
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
+ m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &presentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
+ {
+ presentationTimings.resize( presentationTimingCount );
+ }
+ return createResultValue(
+ result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ }
+
+ template <
+ typename PastPresentationTimingGOOGLEAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ Device::getPastPresentationTimingGOOGLE(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
+ Dispatch const & d ) const
+ {
+ std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
+ pastPresentationTimingGOOGLEAllocator );
+ uint32_t presentationTimingCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentationTimingCount )
+ {
+ presentationTimings.resize( presentationTimingCount );
+ result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
+ m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &presentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
+ {
+ presentationTimings.resize( presentationTimingCount );
+ }
+ return createResultValue(
+ result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
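The enhanced-mode overloads hide the eIncomplete retry loop shown above. A sketch, assuming `device` and `swapchain` are valid and VK_GOOGLE_display_timing is enabled on the device:

  std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
  for ( auto const & timing : timings )
  {
    // compare timing.actualPresentTime with timing.desiredPresentTime to estimate presentation latency
  }
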
+ //=== VK_EXT_discard_rectangles ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ uint32_t discardRectangleCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkFreeMemory(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
+ firstDiscardRectangle,
+ discardRectangleCount,
+ reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkFreeMemory( m_device,
- static_cast<VkDeviceMemory>( memory ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
+ firstDiscardRectangle,
+ discardRectangles.size(),
+ reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_hdr_metadata ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
- const uint32_t * pMaxPrimitiveCounts,
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
+ const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+ const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetAccelerationStructureBuildSizesKHR(
- m_device,
- static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
- pMaxPrimitiveCounts,
- reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
+ d.vkSetHdrMetadataEXT( m_device,
+ swapchainCount,
+ reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
+ reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
+ VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
# else
- if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+ if ( swapchains.size() != metadata.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
- d.vkGetAccelerationStructureBuildSizesKHR(
- m_device,
- static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
- maxPrimitiveCounts.data(),
- reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
- return sizeInfo;
+
+ d.vkSetHdrMetadataEXT( m_device,
+ swapchains.size(),
+ reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
+ reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
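As the size check above implies, the ArrayProxy overload expects exactly one vk::HdrMetadataEXT per swapchain. A sketch assuming a single `swapchain` and a filled-in `hdrMetadata`:

  device.setHdrMetadataEXT( swapchain, hdrMetadata );  // both ArrayProxies are built from single elements here
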
+ //=== VK_KHR_create_renderpass2 ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
- m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>(
+ d.vkCreateRenderPass2KHR( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
- const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return d.vkGetAccelerationStructureDeviceAddressKHR(
- m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass2KHR( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
- m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>(
+ d.vkCreateRenderPass2KHR( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
+ result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
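A sketch of the Unique creation wrapper above (assumes a filled-in vk::RenderPassCreateInfo2 `createInfo`):

  vk::UniqueRenderPass renderPass = device.createRenderPass2KHRUnique( createInfo );
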
- template <typename T, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
- std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
- template <typename T, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
+ const SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- T data;
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- sizeof( T ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
+ reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
+ reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
+ d.vkCmdNextSubpass2KHR( m_commandBuffer,
+ reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
- Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
+ const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
- return memoryRequirements;
+ d.vkCmdNextSubpass2KHR( m_commandBuffer,
+ reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
+ reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
- return structureChain;
+ d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
- const struct AHardwareBuffer * buffer,
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+ d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ //=== VK_KHR_shared_presentable_image ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
- Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
- Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+ Result result =
+ static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
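  // Illustrative usage sketch (editorial example): polling a shared presentable swapchain with the
  // enhanced-mode wrapper above. Assumes exceptions are enabled and a valid device and swapchain
  // created elsewhere; the helper name is hypothetical.
  inline bool exampleSharedSwapchainNeedsRecreation( VULKAN_HPP_NAMESPACE::Device        device,
                                                     VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain )
  {
    // eSuccess and eSuboptimalKHR are both success codes, so the wrapper returns the Result directly.
    VULKAN_HPP_NAMESPACE::Result status = device.getSwapchainStatusKHR( swapchain );
    return status == VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR;
  }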
+
+ //=== VK_KHR_external_fence_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ return externalFenceProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
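  // Illustrative usage sketch (editorial example): checking whether sync-fd fences can be exported,
  // using the enhanced-mode wrapper above. Assumes a valid physical device; the helper name is
  // hypothetical.
  inline bool exampleSupportsSyncFdFenceExport( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo externalFenceInfo;
    externalFenceInfo.setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eSyncFd );
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties properties =
      physicalDevice.getExternalFencePropertiesKHR( externalFenceInfo );
    return static_cast<bool>( properties.externalFenceFeatures &
                              VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits::eExportable );
  }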
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_external_fence_win32 ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
+ const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<Result>( d.vkImportFenceWin32HandleKHR(
+ m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
+ Dispatch const & d ) const
{
- return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR(
+ m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+ m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
- return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ HANDLE handle;
+ Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
+ m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+ return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_KHR_external_fence_fd ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetBufferMemoryRequirements(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ return static_cast<Result>(
+ d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- d.vkGetBufferMemoryRequirements(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
- return memoryRequirements;
+ Result result = static_cast<Result>(
+ d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ return static_cast<Result>(
+ d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return memoryRequirements;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
+ Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return structureChain;
+ int fd;
+ Result result = static_cast<Result>(
+ d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+ return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
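  // Illustrative usage sketch (editorial example): exporting a fence payload as a sync fd and
  // importing it into another fence with the enhanced-mode wrappers above. Assumes exceptions are
  // enabled and both fences were created with the matching export/import capabilities; the helper
  // name is hypothetical.
  inline void exampleTransferFencePayload( VULKAN_HPP_NAMESPACE::Device device,
                                           VULKAN_HPP_NAMESPACE::Fence  exportFence,
                                           VULKAN_HPP_NAMESPACE::Fence  importFence )
  {
    VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR getFdInfo;
    getFdInfo.setFence( exportFence )
      .setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eSyncFd );
    int fd = device.getFenceFdKHR( getFdInfo );  // ownership of the fd passes to the application

    VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR importInfo;
    importInfo.setFence( importFence )
      .setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eSyncFd )
      .setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlagBits::eTemporary )
      .setFd( fd );  // a successful import transfers fd ownership back to the implementation
    device.importFenceFdKHR( importInfo );
  }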
+ //=== VK_KHR_performance_query ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ uint32_t * pCounterCount,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ pCounterCount,
+ reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Allocator, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return memoryRequirements;
+ std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions;
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ counters.size(),
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counterDescriptions.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ counters.size(),
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue( result,
+ counterDescriptions,
+ VULKAN_HPP_NAMESPACE_STRING
+ "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <
+ typename Allocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
+ Allocator const & vectorAllocator,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return structureChain;
+ std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions( vectorAllocator );
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ counters.size(),
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counterDescriptions.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ counters.size(),
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue( result,
+ counterDescriptions,
+ VULKAN_HPP_NAMESPACE_STRING
+ "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PerformanceCounterKHRAllocator,
+ typename PerformanceCounterDescriptionKHRAllocator,
+ typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
+ Dispatch const & d ) const
{
- return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
+ data;
+ std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
+ data.second;
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counters.resize( counterCount );
+ counterDescriptions.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ &counterCount,
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
+ VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
+ {
+ counters.resize( counterCount );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue(
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PerformanceCounterKHRAllocator,
+ typename PerformanceCounterDescriptionKHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename B2,
+ typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
+ std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
+ int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
+ uint32_t queueFamilyIndex,
+ PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
+ PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
+ Dispatch const & d ) const
{
- return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
+ data( std::piecewise_construct,
+ std::forward_as_tuple( performanceCounterKHRAllocator ),
+ std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
+ std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+ std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
+ data.second;
+ uint32_t counterCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counters.resize( counterCount );
+ counterDescriptions.resize( counterCount );
+ result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ m_physicalDevice,
+ queueFamilyIndex,
+ &counterCount,
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
+ VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
+ {
+ counters.resize( counterCount );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue(
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
+ const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+ uint32_t * pNumPasses,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
- reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+ d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
+ pNumPasses );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
+ const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
- reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ uint32_t numPasses;
+ d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
+ &numPasses );
+ return numPasses;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
- const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
- uint64_t * pTimestamps,
- uint64_t * pMaxDeviation,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR(
+ const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampCount,
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ),
- pTimestamps,
- pMaxDeviation ) );
+ d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- ArrayProxy<uint64_t> const & timestamps,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
-# else
- if ( timestampInfos.size() != timestamps.size() )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
- }
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- uint64_t maxDeviation;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- return createResultValue(
- result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+ Result result = static_cast<Result>(
+ d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Uint64_tAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
- std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
- std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
- uint64_t & maxDeviation = data.second;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+ d.vkReleaseProfilingLockKHR( m_device );
}
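  // Illustrative usage sketch (editorial example): the VK_KHR_performance_query flow built from the
  // wrappers above -- enumerate the counters of a queue family, ask how many passes a query over
  // them needs, and bracket the profiled work with the profiling lock. Assumes exceptions are
  // enabled and that queue family 0 exists; the helper name is hypothetical.
  inline void examplePerformanceQuerySetup( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
                                            VULKAN_HPP_NAMESPACE::Device         device )
  {
    uint32_t queueFamilyIndex = 0;
    auto     countersAndDescriptions =
      physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
    std::vector<uint32_t> counterIndices;
    for ( uint32_t i = 0; i < static_cast<uint32_t>( countersAndDescriptions.first.size() ); ++i )
    {
      counterIndices.push_back( i );  // select every reported counter for this sketch
    }

    VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR performanceQueryCreateInfo;
    performanceQueryCreateInfo.setQueueFamilyIndex( queueFamilyIndex )
      .setCounterIndexCount( static_cast<uint32_t>( counterIndices.size() ) )
      .setPCounterIndices( counterIndices.data() );
    uint32_t numPasses =
      physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( performanceQueryCreateInfo );
    ( void )numPasses;  // the query must be submitted once per required pass

    device.acquireProfilingLockKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR() );
    // ... record and submit the performance-query command buffers here ...
    device.releaseProfilingLockKHR();
  }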
- template <typename Uint64_tAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Uint64_tAllocator & uint64_tAllocator,
- Dispatch const & d ) const
- {
- std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
- std::piecewise_construct,
- std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ),
- std::forward_as_tuple( 0 ) );
- std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
- uint64_t & maxDeviation = data.second;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_get_surface_capabilities2 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
}
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
+ PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
+ return createResultValue(
+ result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
}
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+ Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
+ return createResultValue(
+ result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
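  // Illustrative usage sketch (editorial example): querying extended surface capabilities with the
  // enhanced-mode wrapper above. Assumes exceptions are enabled and a valid physical device and
  // surface; the helper name is hypothetical.
  inline uint32_t exampleMinSwapchainImageCount( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR     surface )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo;
    surfaceInfo.setSurface( surface );
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR capabilities =
      physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
    return capabilities.surfaceCapabilities.minImageCount;
  }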
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ pSurfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
- return support;
+ std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ }
+ return createResultValue(
+ result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename SurfaceFormat2KHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
- return structureChain;
+ std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
+ uint32_t surfaceFormatCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
+ }
+ return createResultValue(
+ result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
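  // Illustrative usage sketch (editorial example): listing the surface formats reported through the
  // wrapper above, under the same assumptions as the capability query (valid physical device and
  // surface, exceptions enabled); the helper name is hypothetical.
  inline std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
    exampleQuerySurfaceFormats( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
                                VULKAN_HPP_NAMESPACE::SurfaceKHR     surface )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo;
    surfaceInfo.setSurface( surface );
    // The wrapper performs the usual count/fill retry loop internally and returns the vector.
    return physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
  }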
+ //=== VK_KHR_get_display_properties2 ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename DisplayProperties2KHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
- return support;
+ std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename DisplayProperties2KHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
- return structureChain;
+ std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result =
+ static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
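  // Illustrative usage sketch (editorial example): counting the displays reported by
  // VK_KHR_get_display_properties2 via the wrapper above, assuming exceptions are enabled; the
  // helper name is hypothetical.
  inline size_t exampleCountDisplays( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties =
      physicalDevice.getDisplayProperties2KHR();
    return properties.size();
  }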
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetDeviceAccelerationStructureCompatibilityKHR(
- m_device,
- reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
- reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
- d.vkGetDeviceAccelerationStructureCompatibilityKHR(
- m_device,
- reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
- reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
- return compatibility;
+ std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename DisplayPlaneProperties2KHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneProperties2KHR(
+ DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
{
- d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties(
+ displayPlaneProperties2KHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
- return peerMemoryFeatures;
+ return static_cast<Result>(
+ d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ pPropertyCount,
+ reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
- d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename DisplayModeProperties2KHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayModeProperties2KHR(
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
- return peerMemoryFeatures;
+ std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties(
+ displayModeProperties2KHRAllocator );
+ uint32_t propertyCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
+ {
+ properties.resize( propertyCount );
+ result = static_cast<Result>(
+ d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ {
+ properties.resize( propertyCount );
+ }
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR(
+ const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
- m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+ return static_cast<Result>(
+ d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
+ reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
- Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
+ PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
- Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
- m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+ Result result = static_cast<Result>(
+ d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
+ reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
return createResultValue(
- result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+ result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ //=== VK_MVK_ios_surface ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
- m_device,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ return static_cast<Result>(
+ d.vkCreateIOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
- m_device,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateIOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateIOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ //=== VK_MVK_macos_surface ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ d.vkCreateMacOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- Result result = static_cast<Result>(
- d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateMacOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkGetDeviceMemoryCommitment(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateMacOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
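Both MoltenVK surface wrappers added here follow the same pattern as the other platform surfaces; a hedged sketch of the unique-handle variant, assuming an `instance` handle and an `nsView` pointer to a CAMetalLayer-backed NSView (both illustrative):

  // Only meaningful under VK_USE_PLATFORM_MACOS_MVK; `instance` and `nsView` are assumptions.
  vk::MacOSSurfaceCreateInfoMVK surfaceInfo{};
  surfaceInfo.pView = nsView;                          // NSView whose backing layer is a CAMetalLayer
  vk::UniqueSurfaceKHR surface =
      instance.createMacOSSurfaceMVKUnique( surfaceInfo );   // destroyed by ObjectDestroy on scope exit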
+
+ //=== VK_EXT_debug_utils ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT(
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
+ m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
- Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
- d.vkGetDeviceMemoryCommitment(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
- return committedMemoryInBytes;
+ Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
+ m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
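setDebugUtilsObjectNameEXT attaches a human-readable name to a handle so validation messages and capture tools can refer to it; a sketch naming a buffer, where `device` and `buffer` are assumed handles and the name is arbitrary:

  // Assumes VK_EXT_debug_utils was enabled at instance creation and the dispatcher knows the entry point.
  vk::DebugUtilsObjectNameInfoEXT nameInfo{};
  nameInfo.objectType   = vk::ObjectType::eBuffer;
  nameInfo.objectHandle = uint64_t( static_cast<VkBuffer>( buffer ) );
  nameInfo.pObjectName  = "particle vertex buffer";
  device.setDebugUtilsObjectNameEXT( nameInfo );   // enhanced mode: throws instead of returning a Result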
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeviceMemoryOpaqueCaptureAddress(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ return static_cast<Result>(
+ d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
{
- return d.vkGetDeviceMemoryOpaqueCaptureAddress(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+ Result result = static_cast<Result>(
+ d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
- const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+ d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeviceProcAddr( m_device, pName );
+ d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+ d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+ d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
- Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Queue queue;
- d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
- return queue;
+ d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
- VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetDeviceQueue2(
- m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+ d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
- Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Queue queue;
- d.vkGetDeviceQueue2(
- m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
- return queue;
+ d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
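The queue and command-buffer label wrappers above are thin pass-throughs to the C entry points; a short sketch of how the labels nest while recording (the `commandBuffer` handle and the colour values are illustrative):

  // `commandBuffer` is assumed to be in the recording state.
  vk::DebugUtilsLabelEXT label{};
  label.pLabelName = "shadow pass";
  label.color[0] = 0.2f; label.color[1] = 0.2f; label.color[2] = 0.8f; label.color[3] = 1.0f;
  commandBuffer.beginDebugUtilsLabelEXT( label );
  // ... record the pass here ...
  commandBuffer.endDebugUtilsLabelEXT();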
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+ return static_cast<Result>(
+ d.vkCreateDebugUtilsMessengerEXT( m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
+ Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
- { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+ Result result = static_cast<Result>(
+ d.vkCreateDebugUtilsMessengerEXT( m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
+ return createResultValue(
+ result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
+ Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+ Result result = static_cast<Result>(
+ d.vkCreateDebugUtilsMessengerEXT( m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>(
+ result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
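createDebugUtilsMessengerEXT and its Unique variant wrap vkCreateDebugUtilsMessengerEXT one-to-one; a typical setup sketch, where `instance` and the `debugCallback` function are assumptions and the dispatcher is presumed to have the extension entry points loaded:

  // `debugCallback` is an assumed PFN_vkDebugUtilsMessengerCallbackEXT defined elsewhere.
  vk::DebugUtilsMessengerCreateInfoEXT messengerInfo{};
  messengerInfo.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                                  vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
  messengerInfo.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                                  vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation;
  messengerInfo.pfnUserCallback = debugCallback;
  vk::UniqueDebugUtilsMessengerEXT messenger =
      instance.createDebugUtilsMessengerEXTUnique( messengerInfo );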
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance,
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- int fd;
- Result result = static_cast<Result>(
- d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+ d.vkDestroyDebugUtilsMessengerEXT(
+ m_instance,
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+ d.vkDestroyDebugUtilsMessengerEXT( m_instance,
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ d.vkDestroyDebugUtilsMessengerEXT(
+ m_instance,
+ static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ d.vkSubmitDebugUtilsMessageEXT( m_instance,
+ static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+ static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+ reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const DebugUtilsMessengerCallbackDataEXT & callbackData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+ d.vkSubmitDebugUtilsMessageEXT( m_instance,
+ static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+ static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+ reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_ANDROID_external_memory_android_hardware_buffer ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
+ const struct AHardwareBuffer * buffer,
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+ m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return memoryRequirements;
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+ Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+ m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+ return createResultValue(
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return structureChain;
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+ Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+ m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+ return createResultValue(
+ result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID(
+ const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
+ struct AHardwareBuffer ** pBuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
- m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
+ m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
- Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
+ Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
- Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
- m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
+ struct AHardwareBuffer * buffer;
+ Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
+ m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
+ result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
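On Android, the properties query above is the first step of importing an AHardwareBuffer as device memory; a brief sketch, with `device` and `hardwareBuffer` assumed:

  // Android-only; `hardwareBuffer` is an AHardwareBuffer* obtained from the NDK.
  vk::AndroidHardwareBufferPropertiesANDROID props =
      device.getAndroidHardwareBufferPropertiesANDROID( *hardwareBuffer );
  // props.allocationSize and props.memoryTypeBits feed the subsequent vk::MemoryAllocateInfo
  // (chained with vk::ImportAndroidHardwareBufferInfoANDROID).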
+
+ //=== VK_EXT_sample_locations ===
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetImageMemoryRequirements(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
+ reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- d.vkGetImageMemoryRequirements(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
- return memoryRequirements;
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
+ reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT(
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
+ m_physicalDevice,
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return memoryRequirements;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
+ PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
- return structureChain;
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+ d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
+ m_physicalDevice,
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+ return multisampleProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
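getMultisamplePropertiesEXT reports the sample-location grid a device supports for a given sample count, which bounds what may be passed to setSampleLocationsEXT above; a minimal sketch with an assumed `physicalDevice`:

  // Query the custom sample-location limits for 4x MSAA.
  vk::MultisamplePropertiesEXT msProps =
      physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
  // msProps.maxSampleLocationGridSize caps the grid described by the vk::SampleLocationsInfoEXT
  // handed to commandBuffer.setSampleLocationsEXT() while recording.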
+ //=== VK_KHR_get_memory_requirements2 ===
+
template <typename Dispatch>
VULKAN_HPP_INLINE void
Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
@@ -111416,131 +114780,41 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2KHR( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
- {
- std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements(
- m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
- return sparseMemoryRequirements;
- }
-
- template <
- typename SparseImageMemoryRequirementsAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- Device::getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
- Dispatch const & d ) const
- {
- std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
- sparseImageMemoryRequirementsAllocator );
- uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements(
- m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
- return sparseMemoryRequirements;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
- return sparseMemoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetBufferMemoryRequirements2KHR( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return memoryRequirements;
}
- template <
- typename SparseImageMemoryRequirements2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2(
- const ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d ) const
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
- sparseImageMemoryRequirements2Allocator );
- uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
- return sparseMemoryRequirements;
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2KHR( m_device,
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
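The StructureChain overload above is the usual way to ask for dedicated-allocation advice together with the base requirements; a sketch with `device` and `buffer` assumed:

  // Chains vk::MemoryDedicatedRequirements onto vk::MemoryRequirements2 in one query.
  vk::BufferMemoryRequirementsInfo2 reqInfo{};
  reqInfo.buffer = buffer;
  auto chain = device.getBufferMemoryRequirements2KHR<vk::MemoryRequirements2,
                                                      vk::MemoryDedicatedRequirements>( reqInfo );
  const auto & reqs      = chain.get<vk::MemoryRequirements2>().memoryRequirements;
  const auto & dedicated = chain.get<vk::MemoryDedicatedRequirements>();
  // dedicated.prefersDedicatedAllocation hints that a dedicated VkDeviceMemory is worthwhile.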
@@ -111611,1560 +114885,920 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
- VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetImageSubresourceLayout( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource *>( pSubresource ),
- reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
- Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
- const ImageSubresource & subresource,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
- d.vkGetImageSubresourceLayout( m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<const VkImageSubresource *>( &subresource ),
- reinterpret_cast<VkSubresourceLayout *>( &layout ) );
- return layout;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_acceleration_structure ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetImageViewAddressNVX( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
+ d.vkCreateAccelerationStructureKHR( m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
- Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
- Result result = static_cast<Result>(
- d.vkGetImageViewAddressNVX( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
- return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
- struct AHardwareBuffer ** pBuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
- m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
- Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
+ Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- struct AHardwareBuffer * buffer;
- Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
- m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
return createResultValue(
- result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
- {
- int fd;
- Result result = static_cast<Result>(
- d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetMemoryFdPropertiesKHR( m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- fd,
- reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+ result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
- Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
+ Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
- Result result = static_cast<Result>(
- d.vkGetMemoryFdPropertiesKHR( m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- fd,
- reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
- return createResultValue(
- result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>(
+ result,
+ accelerationStructure,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique",
+ deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
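The unique-handle path above pairs the new acceleration structure with an ObjectDestroy deleter; a sketch of a bottom-level setup, where `device`, `asBuffer`, and `buildSize` (as reported by getAccelerationStructureBuildSizesKHR) are assumptions:

  // `asBuffer` must have been created with eAccelerationStructureStorageKHR usage.
  vk::AccelerationStructureCreateInfoKHR asInfo{};
  asInfo.buffer = asBuffer;
  asInfo.size   = buildSize;          // accelerationStructureSize reported for the geometry
  asInfo.type   = vk::AccelerationStructureTypeKHR::eBottomLevel;
  vk::UniqueAccelerationStructureKHR blas =
      device.createAccelerationStructureKHRUnique( asInfo );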
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- pHostPointer,
- reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
+ d.vkDestroyAccelerationStructureKHR( m_device,
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
- Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
- Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
+ d.vkDestroyAccelerationStructureKHR(
m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- pHostPointer,
- reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
- return createResultValue(
- result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
- m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
- {
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
- m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetMemoryWin32HandleNV( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
- pHandle ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- Dispatch const & d ) const
- {
- HANDLE handle;
- Result result =
- static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
- &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- handle,
- reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
- Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
- Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- handle,
- reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
- return createResultValue(
- result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
- m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
- Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d ) const
- {
- zx_handle_t zirconHandle;
- Result result = static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
- m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
- return createResultValue(
- result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- zirconHandle,
- reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
+ d.vkDestroyAccelerationStructureKHR( m_device,
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
- Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
- Result result = static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
+ d.vkDestroyAccelerationStructureKHR(
m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- zirconHandle,
- reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
- return createResultValue( result,
- memoryZirconHandleProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+ static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
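// Illustrative usage sketch, not part of the generated header or of this diff: the
// enhanced-mode destroy overloads added above take an Optional<const AllocationCallbacks>
// that defaults to nullptr. Assumes the default `vk` namespace and a dispatcher through
// which the VK_KHR_acceleration_structure entry points were loaded.
#include <vulkan/vulkan.hpp>

void releaseAccelerationStructure( vk::Device device, vk::AccelerationStructureKHR as )
{
  // explicit name; the unified overload `device.destroy( as )` is equivalent
  device.destroyAccelerationStructureKHR( as );
}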
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pPresentationTimingCount,
- VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- pPresentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
+ d.vkCmdBuildAccelerationStructuresKHR(
+ m_commandBuffer,
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
- uint32_t presentationTimingCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentationTimingCount )
- {
- presentationTimings.resize( presentationTimingCount );
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &presentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
- VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
+# else
+ if ( infos.size() != pBuildRangeInfos.size() )
{
- presentationTimings.resize( presentationTimingCount );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
- return createResultValue(
- result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
- }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- template <
- typename PastPresentationTimingGOOGLEAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- Device::getPastPresentationTimingGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
- Dispatch const & d ) const
- {
- std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
- pastPresentationTimingGOOGLEAllocator );
- uint32_t presentationTimingCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentationTimingCount )
- {
- presentationTimings.resize( presentationTimingCount );
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &presentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
- VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
- {
- presentationTimings.resize( presentationTimingCount );
- }
- return createResultValue(
- result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ d.vkCmdBuildAccelerationStructuresKHR(
+ m_commandBuffer,
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
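// Illustrative usage sketch, not part of the generated header or of this diff: the
// ArrayProxy overload above pairs each AccelerationStructureBuildGeometryInfoKHR with one
// pointer to its build-range infos and asserts (or throws vk::LogicError) when the two
// proxies differ in size. `buildInfo` and `rangeInfo` are assumed to be filled in by the
// caller as required by VK_KHR_acceleration_structure.
#include <vulkan/vulkan.hpp>

void recordBuild( vk::CommandBuffer                                     cmd,
                  const vk::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                  const vk::AccelerationStructureBuildRangeInfoKHR &    rangeInfo )
{
  const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
  cmd.buildAccelerationStructuresKHR( buildInfo, pRangeInfo );  // one info, one range pointer
}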
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
+ const uint32_t * pIndirectStrides,
+ const uint32_t * const * ppMaxPrimitiveCounts,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetPerformanceParameterINTEL( m_device,
- static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
- reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
+ d.vkCmdBuildAccelerationStructuresIndirectKHR(
+ m_commandBuffer,
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
+ pIndirectStrides,
+ ppMaxPrimitiveCounts );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
- Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
- Result result =
- static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device,
- static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
- reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ ArrayProxy<const uint32_t> const & indirectStrides,
+ ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- std::vector<uint8_t, Uint8_tAllocator> data;
- size_t dataSize;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
- {
- data.resize( dataSize );
- result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
+ VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
+ VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
+# else
+ if ( infos.size() != indirectDeviceAddresses.size() )
{
- data.resize( dataSize );
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
- }
-
- template <typename Uint8_tAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d ) const
- {
- std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
- size_t dataSize;
- Result result;
- do
+ if ( infos.size() != indirectStrides.size() )
{
- result = static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
- {
- data.resize( dataSize );
- result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+ }
+ if ( infos.size() != pMaxPrimitiveCounts.size() )
{
- data.resize( dataSize );
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdBuildAccelerationStructuresIndirectKHR(
+ m_commandBuffer,
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
+ indirectStrides.data(),
+ pMaxPrimitiveCounts.data() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
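// Illustrative usage sketch, not part of the generated header or of this diff: the indirect
// variant above reads the build ranges from buffer device addresses at execution time
// (this needs the accelerationStructureIndirectBuild feature, and the address must come
// from a buffer created with indirect-buffer and device-address usage). `rangeAddress` and
// `maxPrimitiveCounts` are caller-supplied assumptions, one entry per geometry.
#include <vulkan/vulkan.hpp>

void recordIndirectBuild( vk::CommandBuffer                                     cmd,
                          const vk::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                          vk::DeviceAddress                                     rangeAddress,
                          const uint32_t *                                      maxPrimitiveCounts )
{
  // tightly packed range records in the indirect buffer
  const uint32_t stride = static_cast<uint32_t>( sizeof( vk::AccelerationStructureBuildRangeInfoKHR ) );
  cmd.buildAccelerationStructuresIndirectKHR( buildInfo, rangeAddress, stride, maxPrimitiveCounts );
}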
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pInternalRepresentationCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ return static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
- pInternalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
- internalRepresentations;
- uint32_t internalRepresentationCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && internalRepresentationCount )
- {
- internalRepresentations.resize( internalRepresentationCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
- VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
+# else
+ if ( infos.size() != pBuildRangeInfos.size() )
{
- internalRepresentations.resize( internalRepresentationCount );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
- return createResultValue( result,
- internalRepresentations,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
- }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- template <
- typename PipelineExecutableInternalRepresentationKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- Device::getPipelineExecutableInternalRepresentationsKHR(
- const PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
- internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
- uint32_t internalRepresentationCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && internalRepresentationCount )
- {
- internalRepresentations.resize( internalRepresentationCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
- VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
- {
- internalRepresentations.resize( internalRepresentationCount );
- }
+ Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
return createResultValue( result,
- internalRepresentations,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
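// Illustrative usage sketch, not part of the generated header or of this diff: the host
// build wrapper above returns the raw vk::Result because eOperationDeferredKHR and
// eOperationNotDeferredKHR count as success alongside eSuccess. Assumes the
// accelerationStructureHostCommands feature and caller-prepared build structs.
#include <vulkan/vulkan.hpp>

vk::Result buildOnHost( vk::Device                                            device,
                        vk::DeferredOperationKHR                              deferredOp,
                        const vk::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                        const vk::AccelerationStructureBuildRangeInfoKHR &    rangeInfo )
{
  const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
  vk::Result result = device.buildAccelerationStructuresKHR( deferredOp, buildInfo, pRangeInfo );
  if ( result == vk::Result::eOperationDeferredKHR )
  {
    // the work now lives in deferredOp; join it (possibly from several threads) with
    // device.deferredOperationJoinKHR( deferredOp ) until the operation completes
  }
  return result;
}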
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
- uint32_t * pExecutableCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetPipelineExecutablePropertiesKHR( m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
- pExecutableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
+ d.vkCopyAccelerationStructureKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
- {
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
- uint32_t executableCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && executableCount )
- {
- properties.resize( executableCount );
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( executableCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
- {
- properties.resize( executableCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
- }
-
- template <
- typename PipelineExecutablePropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- Device::getPipelineExecutablePropertiesKHR(
- const PipelineInfoKHR & pipelineInfo,
- PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
- pipelineExecutablePropertiesKHRAllocator );
- uint32_t executableCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && executableCount )
- {
- properties.resize( executableCount );
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( executableCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
- {
- properties.resize( executableCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pStatisticCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetPipelineExecutableStatisticsKHR( m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
- pStatisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d ) const
- {
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
- uint32_t statisticCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && statisticCount )
- {
- statistics.resize( statisticCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
- VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
- {
- statistics.resize( statisticCount );
- }
- return createResultValue(
- result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
- }
-
- template <
- typename PipelineExecutableStatisticKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- Device::getPipelineExecutableStatisticsKHR(
- const PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
- Dispatch const & d ) const
+ Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const
{
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
- pipelineExecutableStatisticKHRAllocator );
- uint32_t statisticCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && statisticCount )
- {
- statistics.resize( statisticCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
- VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
- {
- statistics.resize( statisticCount );
- }
- return createResultValue(
- result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ Result result = static_cast<Result>(
+ d.vkCopyAccelerationStructureKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
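// Illustrative usage sketch, not part of the generated header or of this diff: a host-side
// clone using the wrapper above. Passing a null DeferredOperationKHR requests an immediate
// copy; host copies additionally require the accelerationStructureHostCommands feature.
#include <vulkan/vulkan.hpp>

vk::Result cloneOnHost( vk::Device device, vk::AccelerationStructureKHR src, vk::AccelerationStructureKHR dst )
{
  vk::CopyAccelerationStructureInfoKHR copyInfo( src, dst, vk::CopyAccelerationStructureModeKHR::eClone );
  return device.copyAccelerationStructureKHR( nullptr, copyInfo );
}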
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- uint64_t * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- pData );
+ return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d ) const
{
- uint64_t data;
- d.vkGetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- &data );
- return data;
+ Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void * pData,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- dataSize,
- pData,
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
+ return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- ArrayProxy<T> const & data,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const
- {
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- return createResultValue(
- result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
- }
-
- template <typename T, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T, Allocator>>
- Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
- std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- return createResultValue( result,
- data,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
- }
-
- template <typename T, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T>
- Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const
{
- T data;
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- sizeof( T ),
- reinterpret_cast<void *>( &data ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
+ Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
return createResultValue( result,
- data,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
+ m_device,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ dataSize,
+ pData,
+ stride ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename T, typename Dispatch>
VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ ArrayProxy<T> const & data,
+ size_t stride,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
+ Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
+ m_device,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ),
+ stride ) );
return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+ VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
}
template <typename T, typename Allocator, typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
- Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
+ Device::writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result = static_cast<Result>(
- d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
+ Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
+ m_device,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ),
+ stride ) );
return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
}
template <typename T, typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
- Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+ Device::writeAccelerationStructuresPropertyKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d ) const
{
T data;
- Result result =
- static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( T ),
- reinterpret_cast<void *>( &data ) ) );
+ Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
+ m_device,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ),
+ stride ) );
return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
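// Illustrative usage sketch, not part of the generated header or of this diff: with
// exceptions enabled, the templated wrapper above hands the queried value back directly.
// Here the compacted size of a single structure is read on the host; this assumes the
// structure was built with eAllowCompaction and that accelerationStructureHostCommands is
// supported.
#include <vulkan/vulkan.hpp>

vk::DeviceSize queryCompactedSizeOnHost( vk::Device device, vk::AccelerationStructureKHR as )
{
  return device.writeAccelerationStructuresPropertyKHR<vk::DeviceSize>(
    as, vk::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( vk::DeviceSize ) );
}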
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
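// Illustrative usage sketch, not part of the generated header or of this diff: the
// device-side compaction path records the copy wrapper above with mode eCompact, once the
// compacted size has been queried and a suitably sized destination structure has been
// created (both assumed here).
#include <vulkan/vulkan.hpp>

void recordCompactingCopy( vk::CommandBuffer cmd, vk::AccelerationStructureKHR src, vk::AccelerationStructureKHR dst )
{
  vk::CopyAccelerationStructureInfoKHR copyInfo( src, dst, vk::CopyAccelerationStructureModeKHR::eCompact );
  cmd.copyAccelerationStructureKHR( copyInfo );
}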
- template <typename T, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
- Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
- std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+ d.vkCmdCopyAccelerationStructureToMemoryKHR(
+ m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
}
- template <typename T, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
- Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- T data;
- Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( T ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+ d.vkCmdCopyAccelerationStructureToMemoryKHR(
+ m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ d.vkCmdCopyMemoryToAccelerationStructureKHR(
+ m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+ d.vkCmdCopyMemoryToAccelerationStructureKHR(
+ m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename T, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
- Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
- std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+ return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
+ m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
}
- template <typename T, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
- Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
+ const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- T data;
- Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( T ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+ return d.vkGetAccelerationStructureDeviceAddressKHR(
+ m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
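// Illustrative usage sketch, not part of the generated header or of this diff: the wrapper
// above returns the vk::DeviceAddress that a top-level build typically stores into
// VkAccelerationStructureInstanceKHR::accelerationStructureReference.
#include <vulkan/vulkan.hpp>

vk::DeviceAddress bottomLevelAddress( vk::Device device, vk::AccelerationStructureKHR blas )
{
  vk::AccelerationStructureDeviceAddressInfoKHR info( blas );
  return device.getAccelerationStructureAddressKHR( info );
}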
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceSize
- Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t group,
- VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
- m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR(
+ m_commandBuffer,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR(
+ m_commandBuffer,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
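// Illustrative usage sketch, not part of the generated header or of this diff: recording
// the query-pool variant above to fetch a compacted size on the GPU. Assumes `pool` was
// created with query type eAccelerationStructureCompactedSizeKHR and that the structure is
// fully built (and properly synchronized) before this command executes.
#include <vulkan/vulkan.hpp>

void recordCompactedSizeQuery( vk::CommandBuffer cmd, vk::AccelerationStructureKHR as, vk::QueryPool pool )
{
  cmd.resetQueryPool( pool, 0, 1 );
  cmd.writeAccelerationStructuresPropertiesKHR(
    as, vk::QueryType::eAccelerationStructureCompactedSizeKHR, pool, 0 );
}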
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetDeviceAccelerationStructureCompatibilityKHR(
m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+ reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
- Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
- Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+ d.vkGetDeviceAccelerationStructureCompatibilityKHR(
m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
- return createResultValue(
- result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
+ reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+ return compatibility;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
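  // Hypothetical usage sketch for the enhanced overload above: `versionData` is assumed to
  // point at the version header (2 * VK_UUID_SIZE bytes) of a serialized acceleration
  // structure; the result is returned by value, there is no Result to check.
  bool isCompatible( vk::Device device, const uint8_t * versionData )
  {
    vk::AccelerationStructureVersionInfoKHR versionInfo( versionData );
    return device.getAccelerationStructureCompatibilityKHR( versionInfo ) ==
           vk::AccelerationStructureCompatibilityKHR::eCompatible;
  }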
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
+ const uint32_t * pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetRenderAreaGranularity(
- m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+ d.vkGetAccelerationStructureBuildSizesKHR(
+ m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
+ pMaxPrimitiveCounts,
+ reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
- Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
+ Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- VULKAN_HPP_NAMESPACE::Extent2D granularity;
- d.vkGetRenderAreaGranularity(
- m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
- return granularity;
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
+# else
+ if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
+ d.vkGetAccelerationStructureBuildSizesKHR(
+ m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
+ maxPrimitiveCounts.data(),
+ reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+ return sizeInfo;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
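  // Hypothetical usage sketch for the enhanced overload above: `device`, `buildInfo`, and
  // `maxPrimitiveCounts` are placeholders; note the wrapper requires
  // maxPrimitiveCounts.size() == buildInfo.geometryCount, otherwise it asserts
  // (VULKAN_HPP_NO_EXCEPTIONS) or throws vk::LogicError.
  vk::AccelerationStructureBuildSizesInfoKHR
    querySizes( vk::Device                                            device,
                vk::AccelerationStructureBuildGeometryInfoKHR const & buildInfo,
                std::vector<uint32_t> const &                         maxPrimitiveCounts )
  {
    return device.getAccelerationStructureBuildSizesKHR(
      vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCounts );
  }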
+ //=== VK_KHR_sampler_ycbcr_conversion ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ d.vkCreateSamplerYcbcrConversionKHR( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- uint64_t value;
- Result result =
- static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
+ m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+ return createResultValue(
+ result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+ Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
+ m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
+ result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkDestroySamplerYcbcrConversionKHR( m_device,
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- uint64_t value;
- Result result =
- static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+ d.vkDestroySamplerYcbcrConversionKHR(
+ m_device,
+ static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
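  // Hypothetical usage sketch for the KHR-suffixed aliases above (default `vk` namespace,
  // exceptions enabled): the format/model/range values are illustrative only.
  vk::SamplerYcbcrConversion createNv12Conversion( vk::Device device )
  {
    vk::SamplerYcbcrConversionCreateInfo info;
    info.format     = vk::Format::eG8B8R82Plane420Unorm;
    info.ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr709;
    info.ycbcrRange = vk::SamplerYcbcrRange::eItuNarrow;
    vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversionKHR( info );
    // pair with device.destroySamplerYcbcrConversionKHR( conversion ) when no longer needed
    return conversion;
  }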
+ //=== VK_KHR_bind_memory2 ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkBindBufferMemory2KHR(
+ m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ Dispatch const & d ) const
{
- int fd;
- Result result = static_cast<Result>(
- d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+ Result result = static_cast<Result>( d.vkBindBufferMemory2KHR(
+ m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::bindImageMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>( d.vkBindImageMemory2KHR(
+ m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ Dispatch const & d ) const
{
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+ Result result = static_cast<Result>( d.vkBindImageMemory2KHR(
+ m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
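  // Hypothetical usage sketch for the ArrayProxy overload above: binds a single buffer,
  // relying on ArrayProxy's implicit construction from one element; with exceptions
  // enabled the void-returning wrapper throws vk::SystemError on failure.
  void bindWholeBuffer( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
  {
    vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /*memoryOffset*/ );
    device.bindBufferMemory2KHR( bindInfo );
  }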
+ //=== VK_EXT_image_drm_format_modifier ===
-#if defined( VK_USE_PLATFORM_FUCHSIA )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
+ VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
+ return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
m_device,
- reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ),
- pZirconHandle ) );
+ static_cast<VkImage>( image ),
+ reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
- Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
+ Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
- zx_handle_t zirconHandle;
- Result result = static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+ Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
m_device,
- reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
- &zirconHandle ) );
+ static_cast<VkImage>( image ),
+ reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
return createResultValue(
- result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
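  // Hypothetical usage sketch for the enhanced overload above: `image` is assumed to have
  // been created with vk::ImageTiling::eDrmFormatModifierEXT.
  uint64_t queryDrmModifier( vk::Device device, vk::Image image )
  {
    return device.getImageDrmFormatModifierPropertiesEXT( image ).drmFormatModifier;
  }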
+ //=== VK_EXT_validation_cache ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- size_t * pInfoSize,
- void * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- pInfoSize,
- pInfo ) );
+ return static_cast<Result>(
+ d.vkCreateValidationCacheEXT( m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
+ Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- std::vector<uint8_t, Uint8_tAllocator> info;
- size_t infoSize;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && infoSize )
- {
- info.resize( infoSize );
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- reinterpret_cast<void *>( info.data() ) ) );
- VULKAN_HPP_ASSERT( infoSize <= info.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
- {
- info.resize( infoSize );
- }
- return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+ Result result = static_cast<Result>(
+ d.vkCreateValidationCacheEXT( m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
+ return createResultValue(
+ result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
}
- template <typename Uint8_tAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d ) const
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
+ Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
- size_t infoSize;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && infoSize )
- {
- info.resize( infoSize );
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- reinterpret_cast<void *>( info.data() ) ) );
- VULKAN_HPP_ASSERT( infoSize <= info.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
- {
- info.resize( infoSize );
- }
- return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+ Result result = static_cast<Result>(
+ d.vkCreateValidationCacheEXT( m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>(
+ result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- uint64_t * pCounterValue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
- pCounterValue ) );
+ d.vkDestroyValidationCacheEXT( m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- uint64_t counterValue;
- Result result =
- static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
- &counterValue ) );
- return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
+ d.vkDestroyValidationCacheEXT( m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pSwapchainImageCount,
- VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- pSwapchainImageCount,
- reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
+ d.vkDestroyValidationCacheEXT( m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ImageAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
- {
- std::vector<Image, ImageAllocator> swapchainImages;
- uint32_t swapchainImageCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && swapchainImageCount )
- {
- swapchainImages.resize( swapchainImageCount );
- result =
- static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &swapchainImageCount,
- reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
- VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
- {
- swapchainImages.resize( swapchainImageCount );
- }
- return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
- }
-
- template <typename ImageAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- ImageAllocator & imageAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
- uint32_t swapchainImageCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && swapchainImageCount )
- {
- swapchainImages.resize( swapchainImageCount );
- result =
- static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &swapchainImageCount,
- reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
- VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
- {
- swapchainImages.resize( swapchainImageCount );
- }
- return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+ d.vkDestroyValidationCacheEXT( m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>(
+ d.vkMergeValidationCachesEXT( m_device,
+ static_cast<VkValidationCacheEXT>( dstCache ),
+ srcCacheCount,
+ reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
+ Dispatch const & d ) const
{
- Result result =
- static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue(
- result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ Result result = static_cast<Result>(
+ d.vkMergeValidationCachesEXT( m_device,
+ static_cast<VkValidationCacheEXT>( dstCache ),
+ srcCaches.size(),
+ reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
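  // Hypothetical usage sketch combining the Unique creation and ArrayProxy merge overloads
  // above: `cacheA` and `cacheB` are placeholder caches; the destination cache is destroyed
  // automatically when `merged` goes out of scope.
  void mergeCaches( vk::Device device, vk::ValidationCacheEXT cacheA, vk::ValidationCacheEXT cacheB )
  {
    vk::UniqueValidationCacheEXT merged =
      device.createValidationCacheEXTUnique( vk::ValidationCacheCreateInfoEXT() );
    device.mergeValidationCachesEXT( merged.get(), { cacheA, cacheB } );
  }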
@@ -113245,961 +115879,1159 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_shading_rate_image ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t * pVideoSessionMemoryRequirementsCount,
- VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- pVideoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
+ d.vkCmdBindShadingRateImageNV(
+ m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
+ uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
- uint32_t videoSessionMemoryRequirementsCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- &videoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
- VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) &&
- ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- }
- return createResultValue( result,
- videoSessionMemoryRequirements,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer,
+ firstViewport,
+ viewportCount,
+ reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
}
- template <
- typename VideoGetMemoryPropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- Device::getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
+ uint32_t firstViewport,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
- videoGetMemoryPropertiesKHRAllocator );
- uint32_t videoSessionMemoryRequirementsCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- &videoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
- VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) &&
- ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- }
- return createResultValue( result,
- videoSessionMemoryRequirements,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ d.vkCmdSetViewportShadingRatePaletteNV(
+ m_commandBuffer,
+ firstViewport,
+ shadingRatePalettes.size(),
+ reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ uint32_t customSampleOrderCount,
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+ d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
+ static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
+ customSampleOrderCount,
+ reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+ d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
+ static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
+ customSampleOrders.size(),
+ reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
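  // Hypothetical usage sketch for the ArrayProxy overload above: applies a single palette
  // to viewport 0 of a command buffer in the recording state; the viewport count is taken
  // from the proxy size.
  void setPalette( vk::CommandBuffer cmd, vk::ShadingRatePaletteNV const & palette )
  {
    cmd.setViewportShadingRatePaletteNV( 0 /*firstViewport*/, palette );
  }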
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_ray_tracing ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkImportFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+ return static_cast<Result>(
+ d.vkCreateAccelerationStructureNV( m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
+ Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
+ return createResultValue(
+ result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
+ Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkImportSemaphoreFdKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+ Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>(
+ result,
+ accelerationStructure,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique",
+ deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+ d.vkDestroyAccelerationStructureNV( m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+ d.vkDestroyAccelerationStructureNV(
+ m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+ d.vkDestroyAccelerationStructureNV( m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#if defined( VK_USE_PLATFORM_FUCHSIA )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
+ d.vkDestroyAccelerationStructureNV(
m_device,
- reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreZirconHandleFUCHSIA(
- const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
+ d.vkGetAccelerationStructureMemoryRequirementsNV(
m_device,
- reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
+ Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
- m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+ d.vkGetAccelerationStructureMemoryRequirementsNV(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ return memoryRequirements;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
- Dispatch const & d ) const
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
- m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+ d.vkGetAccelerationStructureMemoryRequirementsNV(
+ m_device,
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
+ uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
- m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
+ m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::invalidateMappedMemoryRanges(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+ Device::bindAccelerationStructureMemoryNV(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
- m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+ Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
+ m_device,
+ bindInfos.size(),
+ reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
- void ** ppData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkMapMemory( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkDeviceSize>( size ),
- static_cast<VkMemoryMapFlags>( flags ),
- ppData ) );
+ d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
+ reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
+ static_cast<VkBuffer>( instanceData ),
+ static_cast<VkDeviceSize>( instanceOffset ),
+ static_cast<VkBool32>( update ),
+ static_cast<VkAccelerationStructureNV>( dst ),
+ static_cast<VkAccelerationStructureNV>( src ),
+ static_cast<VkBuffer>( scratch ),
+ static_cast<VkDeviceSize>( scratchOffset ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void *>::type
- Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- void * pData;
- Result result = static_cast<Result>( d.vkMapMemory( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkDeviceSize>( size ),
- static_cast<VkMemoryMapFlags>( flags ),
- &pData ) );
- return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+ d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
+ reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
+ static_cast<VkBuffer>( instanceData ),
+ static_cast<VkDeviceSize>( instanceOffset ),
+ static_cast<VkBool32>( update ),
+ static_cast<VkAccelerationStructureNV>( dst ),
+ static_cast<VkAccelerationStructureNV>( src ),
+ static_cast<VkBuffer>( scratch ),
+ static_cast<VkDeviceSize>( scratchOffset ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
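  // Hypothetical usage sketch for the enhanced overload above: an initial (non-update)
  // bottom-level build, so instanceData and src are null handles; `info`, `dst`, and
  // `scratch` are placeholders created and sized beforehand.
  void recordInitialBuild( vk::CommandBuffer                       cmd,
                           vk::AccelerationStructureInfoNV const & info,
                           vk::AccelerationStructureNV             dst,
                           vk::Buffer                              scratch )
  {
    cmd.buildAccelerationStructureNV(
      info, nullptr /*instanceData*/, 0, VK_FALSE /*update*/, dst, nullptr /*src*/, scratch, 0 );
  }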
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkMergePipelineCaches( m_device,
- static_cast<VkPipelineCache>( dstCache ),
- srcCacheCount,
- reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+ d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
+ static_cast<VkAccelerationStructureNV>( dst ),
+ static_cast<VkAccelerationStructureNV>( src ),
+ static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
+ VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
+ VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result =
- static_cast<Result>( d.vkMergePipelineCaches( m_device,
- static_cast<VkPipelineCache>( dstCache ),
- srcCaches.size(),
- reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+ d.vkCmdTraceRaysNV( m_commandBuffer,
+ static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
+ static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
+ static_cast<VkBuffer>( missShaderBindingTableBuffer ),
+ static_cast<VkDeviceSize>( missShaderBindingOffset ),
+ static_cast<VkDeviceSize>( missShaderBindingStride ),
+ static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
+ static_cast<VkDeviceSize>( hitShaderBindingOffset ),
+ static_cast<VkDeviceSize>( hitShaderBindingStride ),
+ static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
+ static_cast<VkDeviceSize>( callableShaderBindingOffset ),
+ static_cast<VkDeviceSize>( callableShaderBindingStride ),
+ width,
+ height,
+ depth );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkMergeValidationCachesEXT( m_device,
- static_cast<VkValidationCacheEXT>( dstCache ),
- srcCacheCount,
- reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
- Dispatch const & d ) const
+ template <typename PipelineAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkMergeValidationCachesEXT( m_device,
- static_cast<VkValidationCacheEXT>( dstCache ),
- srcCaches.size(),
- reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PipelineAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFence *>( pFence ) ) );
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue(
+ result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
+ Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ return createResultValue(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Dispatch, typename PipelineAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesNVUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFence *>( pFence ) ) );
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayEventInfoEXT & displayEventInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ template <
+ typename Dispatch,
+ typename PipelineAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesNVUnique(
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ {
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
+ {
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+ }
+ }
+ return createResultValue(
+ result,
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayEventInfoEXT & displayEventInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
+ Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
+ return createResultValue<Pipeline, Dispatch>(
+ result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
+ deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
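// Editorial sketch (not part of this diff): minimal use of the reworked
// createRayTracingPipelineNVUnique wrapper shown above. Assumes the default "vk"
// namespace, enhanced mode and smart handles enabled, and the usual defaulted
// allocator/dispatcher arguments declared elsewhere in the header; `device`, `cache`,
// and `createInfo` are placeholder names supplied by the caller.
#include <utility>
#include <vulkan/vulkan.hpp>

vk::UniquePipeline makeRayTracingPipelineNV( vk::Device                                 device,
                                             vk::PipelineCache                          cache,
                                             vk::RayTracingPipelineCreateInfoNV const & createInfo )
{
  // Returns a ResultValue because ePipelineCompileRequiredEXT is also accepted as success here.
  auto rv = device.createRayTracingPipelineNVUnique( cache, createInfo );
  // With exceptions enabled, failure codes throw; reaching this point means rv.result is
  // eSuccess or ePipelineCompileRequiredEXT, and rv.value owns the pipeline.
  return std::move( rv.value );
}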
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
-# else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ ArrayProxy<T> const & data,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+ Result result =
+ static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
+ Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+ Result result =
+ static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
}
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d ) const
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
+ Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
+ T data;
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ) ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
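// Editorial sketch (not part of this diff): the typed, vector-returning flavour of
// getRayTracingShaderGroupHandlesNV defined above. Assumes the default "vk" namespace and
// dispatcher; `device`, `pipeline`, and the handle size from the device's NV ray tracing
// properties are placeholders supplied by the caller.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint8_t> fetchGroupHandlesNV( vk::Device   device,
                                          vk::Pipeline pipeline,
                                          uint32_t     groupCount,
                                          uint32_t     shaderGroupHandleSize )
{
  // dataSize must be a multiple of sizeof(T); uint8_t keeps the raw handle layout.
  size_t dataSize = size_t( groupCount ) * shaderGroupHandleSize;
  return device.getRayTracingShaderGroupHandlesNV<uint8_t>( pipeline, 0, groupCount, dataSize );
}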
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- }
-#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
+ return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
+ m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ ArrayProxy<T> const & data,
+ Dispatch const & d ) const
{
- d.vkReleaseProfilingLockKHR( m_device );
+ Result result = static_cast<Result>(
+ d.vkGetAccelerationStructureHandleNV( m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
}
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
+ Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkResetCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>(
+ d.vkGetAccelerationStructureHandleNV( m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
}
-#else
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d ) const
+
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
+ Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkResetCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+ T data;
+ Result result = static_cast<Result>(
+ d.vkGetAccelerationStructureHandleNV( m_device,
+ static_cast<VkAccelerationStructureNV>( accelerationStructure ),
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ) ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
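// Editorial sketch (not part of this diff): the single-value flavour of
// getAccelerationStructureHandleNV added above, which reads the 64-bit handle written
// into instance data for top-level builds. Assumes the default "vk" namespace and
// dispatcher; `device` and `accelerationStructure` are placeholders.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t accelerationStructureReferenceNV( vk::Device                     device,
                                           vk::AccelerationStructureNV    accelerationStructure )
{
  return device.getAccelerationStructureHandleNV<uint64_t>( accelerationStructure );
}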
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
+ uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkResetDescriptorPool(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ d.vkCmdWriteAccelerationStructuresPropertiesNV(
+ m_commandBuffer,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
-#else
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkResetDescriptorPool(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
+ d.vkCmdWriteAccelerationStructuresPropertiesNV(
+ m_commandBuffer,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+ return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+ Result result =
+ static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_maintenance3 ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>(
- d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ return support;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ return structureChain;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
- }
+ //=== VK_KHR_draw_indirect_count ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+ d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+ d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
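// Editorial sketch (not part of this diff): recording a GPU-driven draw where the draw
// count lives in a buffer (VK_KHR_draw_indirect_count, wrapped above). Assumes `cmd` is
// inside a render pass with a graphics pipeline and vertex buffers bound; the same pattern
// applies to drawIndexedIndirectCountKHR. Names are placeholders.
#include <vulkan/vulkan.hpp>

void recordCountedDraws( vk::CommandBuffer cmd,
                         vk::Buffer        drawParams,   // array of VkDrawIndirectCommand
                         vk::Buffer        drawCount,    // single uint32_t, clamped to maxDrawCount
                         uint32_t          maxDrawCount )
{
  cmd.drawIndirectCountKHR( drawParams, 0, drawCount, 0, maxDrawCount, sizeof( VkDrawIndirectCommand ) );
}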
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_external_memory_host ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+ return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ pHostPointer,
+ reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
+ Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ pHostPointer,
+ reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
+ return createResultValue(
+ result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
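// Editorial sketch (not part of this diff): querying which memory types can import a host
// allocation via VK_EXT_external_memory_host, using the enhanced wrapper above. Assumes
// `hostPtr` meets the device's minImportedHostPointerAlignment and that the default "vk"
// namespace and dispatcher are in use; names are placeholders.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint32_t importableMemoryTypes( vk::Device device, void * hostPtr )
{
  vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
    vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
  return props.memoryTypeBits;
}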
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
- }
-#else
+ //=== VK_AMD_buffer_marker ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ uint32_t marker,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+ d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
+ static_cast<VkPipelineStageFlagBits>( pipelineStage ),
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ marker );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_calibrated_timestamps ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
- const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
- const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
+ VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkSetHdrMetadataEXT( m_device,
- swapchainCount,
- reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
- reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+ m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ template <typename TimeDomainEXTAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
-# else
- if ( swapchains.size() != metadata.size() )
+ std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
+ uint32_t timeDomainCount;
+ Result result;
+ do
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && timeDomainCount )
+ {
+ timeDomains.resize( timeDomainCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+ m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
+ VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
+ {
+ timeDomains.resize( timeDomainCount );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-
- d.vkSetHdrMetadataEXT( m_device,
- swapchains.size(),
- reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
- reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
- VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkSetLocalDimmingAMD(
- m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+ return createResultValue(
+ result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
}
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- data ) );
- }
-#else
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const
+ template <typename TimeDomainEXTAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
- data ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+ std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
+ uint32_t timeDomainCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && timeDomainCount )
+ {
+ timeDomains.resize( timeDomainCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+ m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
+ VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
+ {
+ timeDomains.resize( timeDomainCount );
+ }
+ return createResultValue(
+ result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
- const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
+ const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
+ uint64_t * pTimestamps,
+ uint64_t * pMaxDeviation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ d.vkGetCalibratedTimestampsEXT( m_device,
+ timestampCount,
+ reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ),
+ pTimestamps,
+ pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ ArrayProxy<uint64_t> const & timestamps,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
+# else
+ if ( timestampInfos.size() != timestamps.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ uint64_t maxDeviation;
+ Result result = static_cast<Result>(
+ d.vkGetCalibratedTimestampsEXT( m_device,
+ timestampInfos.size(),
+ reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
+ timestamps.data(),
+ &maxDeviation ) );
+ return createResultValue(
+ result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename Uint64_tAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ Device::getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
+ std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+ std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
+ uint64_t & maxDeviation = data.second;
+ Result result = static_cast<Result>(
+ d.vkGetCalibratedTimestampsEXT( m_device,
+ timestampInfos.size(),
+ reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
+ timestamps.data(),
+ &maxDeviation ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+ template <typename Uint64_tAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ Device::getCalibratedTimestampsEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Uint64_tAllocator & uint64_tAllocator,
+ Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+ std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
+ std::piecewise_construct,
+ std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ),
+ std::forward_as_tuple( 0 ) );
+ std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
+ uint64_t & maxDeviation = data.second;
+ Result result = static_cast<Result>(
+ d.vkGetCalibratedTimestampsEXT( m_device,
+ timestampInfos.size(),
+ reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
+ timestamps.data(),
+ &maxDeviation ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
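// Editorial sketch (not part of this diff): pairing a device timestamp with a host clock
// reading through the vector-returning getCalibratedTimestampsEXT flavour above. Assumes
// the device advertises both eDevice and eClockMonotonic in getCalibrateableTimeDomainsEXT
// and that the default "vk" namespace and dispatcher are in use; names are placeholders.
#include <array>
#include <cstdint>
#include <utility>
#include <vulkan/vulkan.hpp>

std::pair<uint64_t, uint64_t> sampleDeviceAndHostClock( vk::Device device )
{
  std::array<vk::CalibratedTimestampInfoEXT, 2> infos = { {
    vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
    vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic )
  } };
  // Returns { timestamps (one per requested domain), maxDeviation } in a single call.
  auto data = device.getCalibratedTimestampsEXT( infos );
  return { data.first[0], data.first[1] };
}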
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkTrimCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
- }
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkTrimCommandPoolKHR(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
- }
+ //=== VK_NV_mesh_shader ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount,
+ uint32_t firstTask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkUninitializePerformanceApiINTEL( m_device );
+ d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+ d.vkCmdDrawMeshTasksIndirectNV(
+ m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkUpdateDescriptorSetWithTemplate( m_device,
- static_cast<VkDescriptorSet>( descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- pData );
+ d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
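// Editorial sketch (not part of this diff): the direct mesh dispatch from VK_NV_mesh_shader
// wrapped above. Assumes `cmd` has a mesh-shading pipeline bound and the default "vk"
// namespace and dispatcher are in use; names are placeholders.
#include <vulkan/vulkan.hpp>

void recordMeshWork( vk::CommandBuffer cmd, uint32_t taskCount )
{
  // Launches taskCount task-shader workgroups starting at firstTask = 0.
  cmd.drawMeshTasksNV( taskCount, 0 );
}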
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
- static_cast<VkDescriptorSet>( descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- pData );
- }
+ //=== VK_NV_scissor_exclusive ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSets( uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ uint32_t exclusiveScissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkUpdateDescriptorSets( m_device,
- descriptorWriteCount,
- reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
- descriptorCopyCount,
- reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+ d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+ firstExclusiveScissor,
+ exclusiveScissorCount,
+ reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkUpdateDescriptorSets( m_device,
- descriptorWrites.size(),
- reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
- descriptorCopies.size(),
- reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+ d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+ firstExclusiveScissor,
+ exclusiveScissors.size(),
+ reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_NV_device_diagnostic_checkpoints ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
+ d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VideoSessionParametersUpdateInfoKHR & updateInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+ VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
+ d.vkGetQueueCheckpointDataNV(
+ m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CheckpointDataNVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ Queue::getCheckpointDataNV( Dispatch const & d ) const
{
- return static_cast<Result>( d.vkWaitForFences(
- m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+ std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointDataNV(
+ m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d ) const
+ template <typename CheckpointDataNVAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
{
- Result result = static_cast<Result>( d.vkWaitForFences( m_device,
- fences.size(),
- reinterpret_cast<const VkFence *>( fences.data() ),
- static_cast<VkBool32>( waitAll ),
- timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+ std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointDataNV(
+ m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
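A possible use of the vector-returning overloads above, for example after a queue submission has reported VK_ERROR_DEVICE_LOST. The helper is hypothetical and assumes the default exception-enabled configuration; only the wrapper call itself comes from the header:

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Illustrative helper: collect the checkpoint markers the queue last reached.
    std::vector<void *> collectCheckpointMarkers( vk::Queue queue )
    {
      std::vector<void *> markers;
      for ( vk::CheckpointDataNV const & cp : queue.getCheckpointDataNV() )
      {
        // pCheckpointMarker is whatever pointer was recorded via CommandBuffer::setCheckpointNV
        markers.push_back( cp.pCheckpointMarker );
      }
      return markers;
    }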
+ //=== VK_KHR_timeline_semaphore ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+ d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo,
- uint64_t timeout,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+ Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
{
- Result result = static_cast<Result>(
- d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+ uint64_t value;
+ Result result =
+ static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+ return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
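The enhanced-mode wrapper above returns the counter value directly (throwing on failure in the default exception-enabled configuration), so polling a timeline semaphore reduces to a comparison. The helper below is a sketch, assuming 'timelineSemaphore' was created as a timeline semaphore:

    #include <vulkan/vulkan.hpp>

    // Illustrative helper: has the timeline semaphore reached 'target' yet?
    bool hasReachedValue( vk::Device device, vk::Semaphore timelineSemaphore, uint64_t target )
    {
      return device.getSemaphoreCounterValueKHR( timelineSemaphore ) >= target;
    }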
@@ -114228,476 +117060,265 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- void * pData,
- size_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
+ const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- dataSize,
- pData,
- stride ) );
+ return static_cast<Result>(
+ d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- ArrayProxy<T> const & data,
- size_t stride,
- Dispatch const & d ) const
- {
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- stride ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
- }
-
- template <typename T, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
- Device::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
- std::vector<T, Allocator> data( dataSize / sizeof( T ) );
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- stride ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
- }
-
- template <typename T, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
- Device::writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
{
- T data;
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- sizeof( T ),
- reinterpret_cast<void *>( &data ),
- stride ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+ Result result = static_cast<Result>(
+ d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_INTEL_performance_query ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
+ const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
+ m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+ Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
+ m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
+ d.vkUninitializePerformanceApiINTEL( m_device );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
+ return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
- Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
- Result result = static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
- return createResultValue(
- result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+ Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
- Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
- Result result = static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>(
- result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
+ return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
+ Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+ Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
- Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
- Result result = static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
- return createResultValue(
- result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
+ return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
- Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
- Result result = static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>(
- result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+ Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
+ m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
+ const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
+ m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
+ Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+ Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
+ m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
+ return createResultValue(
+ result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
+ Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+ Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
+ m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
+ ObjectRelease<Device, Dispatch> deleter( *this, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>(
+ result,
+ configuration,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique",
+ deleter );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
+ m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+ Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
+ m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release(
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+ return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
+ m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
{
- return static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
+ m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+ return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
+ m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
+#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
+ Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
+ m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_IOS_MVK )
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkCreateIOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ d.vkGetPerformanceParameterINTEL( m_device,
+ static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
+ reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
+ Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateIOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+ Result result =
+ static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device,
+ static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
+ reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
+ return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
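A hedged sketch of the parameter query above, assuming the standard VK_INTEL_performance_query value types and an exception-enabled build; the helper itself is illustrative:

    #include <vulkan/vulkan.hpp>

    // Illustrative check: does the INTEL performance-query implementation expose hardware counters?
    bool hwCountersSupported( vk::Device device )
    {
      vk::PerformanceValueINTEL value =
        device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
      return ( value.type == vk::PerformanceValueTypeINTEL::eBool ) && ( value.data.valueBool != 0 );
    }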
+
+ //=== VK_AMD_display_native_hdr ===
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
+ VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateIOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
+ d.vkSetLocalDimmingAMD(
+ m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
}
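The new local-dimming setter returns nothing; a minimal sketch, assuming the swapchain was created with VkSwapchainDisplayNativeHdrCreateInfoAMD and that local dimming is supported:

    #include <vulkan/vulkan.hpp>

    // Illustrative helper: toggle local dimming on an AMD display-native-HDR swapchain.
    void enableLocalDimming( vk::Device device, vk::SwapchainKHR swapchain )
    {
      device.setLocalDimmingAMD( swapchain, VK_TRUE );
    }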
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_imagepipe_surface ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA(
const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
@@ -114754,63 +117375,9 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
-#if defined( VK_USE_PLATFORM_MACOS_MVK )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateMacOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateMacOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateMacOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_surface ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
@@ -114866,4776 +117433,2864 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_METAL_EXT*/
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_KHR_fragment_shading_rate ===
-#if defined( VK_USE_PLATFORM_GGP )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
+ uint32_t * pFragmentShadingRateCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
- m_instance,
- reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ m_physicalDevice,
+ pFragmentShadingRateCount,
+ reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
}
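The pointer-based query above follows the usual two-call enumeration pattern, which the enhanced-mode overload further below wraps into a returned vector. A hypothetical helper using that wrapper, assuming the default exception-enabled configuration:

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Illustrative helper: list the fragment sizes the device advertises for shading rates.
    std::vector<vk::Extent2D> supportedFragmentSizes( vk::PhysicalDevice physicalDevice )
    {
      std::vector<vk::Extent2D> sizes;
      for ( vk::PhysicalDeviceFragmentShadingRateKHR const & rate : physicalDevice.getFragmentShadingRatesKHR() )
      {
        sizes.push_back( rate.fragmentSize );
      }
      return sizes;
    }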
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
- m_instance,
- reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
+ fragmentShadingRates;
+ uint32_t fragmentShadingRateCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+ {
+ fragmentShadingRates.resize( fragmentShadingRateCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ m_physicalDevice,
+ &fragmentShadingRateCount,
+ reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+ VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
+ {
+ fragmentShadingRates.resize( fragmentShadingRateCount );
+ }
return createResultValue(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
- m_instance,
- reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_GGP*/
-
-#if defined( VK_USE_PLATFORM_VI_NN )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
- reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateViSurfaceNN( m_instance,
- reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateViSurfaceNN( m_instance,
- reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateWin32SurfaceKHR( m_instance,
- reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWin32SurfaceKHR( m_instance,
- reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWin32SurfaceKHR( m_instance,
- reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_XCB_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateXcbSurfaceKHR( m_instance,
- reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateXcbSurfaceKHR( m_instance,
- reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateXcbSurfaceKHR( m_instance,
- reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#if defined( VK_USE_PLATFORM_XLIB_KHR )
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkCreateXlibSurfaceKHR( m_instance,
- reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateXlibSurfaceKHR( m_instance,
- reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
- }
-
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateXlibSurfaceKHR( m_instance,
- reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
- }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
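[editor's note] The platform-specific surface wrappers deleted above (Wayland, Win32, Xcb, Xlib) appear to be relocated as part of this release's reorganization of the header rather than removed from the API; the call pattern is unchanged. A minimal usage sketch of the Xlib variant, assuming enhanced mode with exceptions, an existing vk::Instance `instance`, and a hypothetical Display * `dpy` / Window `window` pair:

  vk::XlibSurfaceCreateInfoKHR createInfo( {}, dpy, window );  // flags, Display *, Window
  vk::SurfaceKHR surface = instance.createXlibSurfaceKHR( createInfo );
  // ... or keep ownership in a smart handle instead:
  // vk::UniqueSurfaceKHR uniqueSurface = instance.createXlibSurfaceKHRUnique( createInfo );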
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char * pLayerPrefix,
- const char * pMessage,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDebugReportMessageEXT( m_instance,
- static_cast<VkDebugReportFlagsEXT>( flags ),
- static_cast<VkDebugReportObjectTypeEXT>( objectType ),
- object,
- location,
- messageCode,
- pLayerPrefix,
- pMessage );
+ result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const std::string & layerPrefix,
- const std::string & message,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
+ int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ PhysicalDevice::getFragmentShadingRatesKHR(
+ PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
+ Dispatch const & d ) const
{
- d.vkDebugReportMessageEXT( m_instance,
- static_cast<VkDebugReportFlagsEXT>( flags ),
- static_cast<VkDebugReportObjectTypeEXT>( objectType ),
- object,
- location,
- messageCode,
- layerPrefix.c_str(),
- message.c_str() );
+ std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
+ fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
+ uint32_t fragmentShadingRateCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+ {
+ fragmentShadingRates.resize( fragmentShadingRateCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ m_physicalDevice,
+ &fragmentShadingRateCount,
+ reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+ VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
+ {
+ fragmentShadingRates.resize( fragmentShadingRateCount );
+ }
+ return createResultValue(
+ result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
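[editor's note] The enhanced-mode overloads above wrap the usual two-call enumeration pattern (query the count, fill the vector, retry on eIncomplete). A minimal caller-side sketch, assuming exceptions are enabled, VK_KHR_fragment_shading_rate is enabled on the device, and `physicalDevice` is an existing vk::PhysicalDevice:

  std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates = physicalDevice.getFragmentShadingRatesKHR();
  for ( auto const & rate : rates )
  {
    // rate.fragmentSize is one supported shading-rate texel size, rate.sampleCounts the sample counts it supports
  }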
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+ const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDebugReportCallbackEXT( m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+ reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+ const Extent2D & fragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDebugReportCallbackEXT(
- m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+ reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
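[editor's note] A sketch of the matching command-buffer call, assuming `commandBuffer` is in the recording state and the pipeline fragment-shading-rate feature is enabled; the 2x2 fragment size and the two combiner operations are illustrative values only:

  vk::Extent2D                         fragmentSize{ 2, 2 };
  vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                                          vk::FragmentShadingRateCombinerOpKHR::eKeep };
  commandBuffer.setFragmentShadingRateKHR( fragmentSize, combinerOps );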
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyDebugReportCallbackEXT( m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyDebugReportCallbackEXT(
- m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_buffer_device_address ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
+ const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDebugUtilsMessengerEXT( m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyDebugUtilsMessengerEXT(
- m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
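[editor's note] The EXT wrapper above forwards directly to vkGetBufferDeviceAddressEXT. A usage sketch, assuming `device` is a vk::Device with the buffer-device-address feature enabled and `buffer` is a hypothetical vk::Buffer created with the shader-device-address usage bit:

  vk::BufferDeviceAddressInfo addressInfo( buffer );
  vk::DeviceAddress           address = device.getBufferAddressEXT( addressInfo );  // address usable from shaders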
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyDebugUtilsMessengerEXT( m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyDebugUtilsMessengerEXT(
- m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_tooling_info ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+ m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroyInstance( m_instance,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
+ PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
{
- d.vkDestroySurfaceKHR(
- m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
+ uint32_t toolCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && toolCount )
+ {
+ toolProperties.resize( toolCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+ m_physicalDevice,
+ &toolCount,
+ reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
+ {
+ toolProperties.resize( toolCount );
+ }
+ return createResultValue(
+ result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <
+ typename PhysicalDeviceToolPropertiesEXTAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
+ PhysicalDevice::getToolPropertiesEXT(
+ PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
{
- d.vkDestroySurfaceKHR( m_instance,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties(
+ physicalDeviceToolPropertiesEXTAllocator );
+ uint32_t toolCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && toolCount )
+ {
+ toolProperties.resize( toolCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+ m_physicalDevice,
+ &toolCount,
+ reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
+ {
+ toolProperties.resize( toolCount );
+ }
+ return createResultValue(
+ result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
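[editor's note] VK_EXT_tooling_info reports tools (validation layers, profilers, and similar) currently active on the device; the enhanced wrapper above hides the count/fill loop. A minimal sketch, assuming exceptions and an existing `physicalDevice`:

  std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools = physicalDevice.getToolPropertiesEXT();
  for ( auto const & tool : tools )
  {
    // tool.name, tool.version and tool.purposes identify one active tool, e.g. the validation layer
  }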
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroySurfaceKHR(
- m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkDestroySurfaceKHR( m_instance,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_cooperative_matrix ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- pPhysicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+ template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
+ typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
{
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
- uint32_t physicalDeviceGroupCount;
+ std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
+ uint32_t propertyCount;
Result result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ m_physicalDevice,
+ &propertyCount,
+ reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ properties.resize( propertyCount );
}
return createResultValue(
- result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
}
template <
- typename PhysicalDeviceGroupPropertiesAllocator,
+ typename CooperativeMatrixPropertiesNVAllocator,
typename Dispatch,
typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroups(
- PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ PhysicalDevice::getCooperativeMatrixPropertiesNV(
+ CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
{
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
- physicalDeviceGroupPropertiesAllocator );
- uint32_t physicalDeviceGroupCount;
+ std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
+ cooperativeMatrixPropertiesNVAllocator );
+ uint32_t propertyCount;
Result result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = static_cast<Result>(
+ d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && propertyCount )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ properties.resize( propertyCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ m_physicalDevice,
+ &propertyCount,
+ reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ properties.resize( propertyCount );
}
return createResultValue(
- result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
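[editor's note] The same enumeration pattern is generated for VK_NV_cooperative_matrix; each returned element describes one supported matrix multiply-accumulate configuration. Sketch, assuming an existing `physicalDevice` with the extension enabled:

  std::vector<vk::CooperativeMatrixPropertiesNV> shapes = physicalDevice.getCooperativeMatrixPropertiesNV();
  for ( auto const & shape : shapes )
  {
    // shape.MSize x shape.NSize x shape.KSize, component types shape.AType/BType/CType/DType, at shape.scope
  }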
+ //=== VK_NV_coverage_reduction_mode ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
+ uint32_t * pCombinationCount,
+ VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- pPhysicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice,
+ pCombinationCount,
+ reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
+ template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
{
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
- uint32_t physicalDeviceGroupCount;
- Result result;
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
+ uint32_t combinationCount;
+ Result result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice, &combinationCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && combinationCount )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ combinations.resize( combinationCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice,
+ &combinationCount,
+ reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+ VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ combinations.resize( combinationCount );
}
return createResultValue( result,
- physicalDeviceGroupProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ combinations,
+ VULKAN_HPP_NAMESPACE_STRING
+ "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
}
- template <
- typename PhysicalDeviceGroupPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroupsKHR(
- PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+ template <typename FramebufferMixedSamplesCombinationNVAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
+ int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
+ FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
+ Dispatch const & d ) const
{
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
- physicalDeviceGroupPropertiesAllocator );
- uint32_t physicalDeviceGroupCount;
+ std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
+ framebufferMixedSamplesCombinationNVAllocator );
+ uint32_t combinationCount;
Result result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice, &combinationCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && combinationCount )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ combinations.resize( combinationCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice,
+ &combinationCount,
+ reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+ VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
{
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+ combinations.resize( combinationCount );
}
return createResultValue( result,
- physicalDeviceGroupProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ combinations,
+ VULKAN_HPP_NAMESPACE_STRING
+ "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
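[editor's note] And once more for VK_NV_coverage_reduction_mode; a sketch listing the supported rasterization / color / depth-stencil sample combinations, under the same assumptions as above:

  std::vector<vk::FramebufferMixedSamplesCombinationNV> combinations =
    physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();
  for ( auto const & combination : combinations )
  {
    // combination.coverageReductionMode, combination.rasterizationSamples,
    // combination.depthStencilSamples, combination.colorSamples
  }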
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_EXT_full_screen_exclusive ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
- VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ pPresentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceAllocator, typename Dispatch>
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename PresentModeKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+ typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d ) const
{
- std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
- uint32_t physicalDeviceCount;
+ std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
+ uint32_t presentModeCount;
Result result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentModeCount )
{
- physicalDevices.resize( physicalDeviceCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ presentModes.resize( presentModeCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
+ if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
{
- physicalDevices.resize( physicalDeviceCount );
+ presentModes.resize( presentModeCount );
}
return createResultValue(
- result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
}
- template <typename PhysicalDeviceAllocator,
+ template <typename PresentModeKHRAllocator,
typename Dispatch,
typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
+ typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ PresentModeKHRAllocator & presentModeKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
- uint32_t physicalDeviceCount;
+ std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+ uint32_t presentModeCount;
Result result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && presentModeCount )
{
- physicalDevices.resize( physicalDeviceCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
- VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ presentModes.resize( presentModeCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
+ if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
{
- physicalDevices.resize( physicalDeviceCount );
+ presentModes.resize( presentModeCount );
}
return createResultValue(
- result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return d.vkGetInstanceProcAddr( m_instance, pName );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+ result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkSubmitDebugUtilsMessageEXT( m_instance,
- static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
- static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
- reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
+ return static_cast<Result>(
+ d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# else
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const DebugUtilsMessengerCallbackDataEXT & callbackData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- d.vkSubmitDebugUtilsMessageEXT( m_instance,
- static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
- static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
- reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+ Result result = static_cast<Result>(
+ d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
- VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>(
+ d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
# else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+ Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
- Result result =
- static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+ Result result = static_cast<Result>(
+ d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT(
- Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
+ m_device,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- PhysicalDevice::acquireXlibDisplayEXT( Display & dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d ) const
{
- Result result =
- static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+ Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
+ m_device,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+ return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
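[editor's note] The VK_EXT_full_screen_exclusive block above is Win32-only. A sketch of the expected flow, assuming `surface` and `swapchain` are hypothetical handles whose create-info chains included a vk::SurfaceFullScreenExclusiveInfoEXT set to eApplicationControlled:

  vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModes2EXT( surfaceInfo );
  device.acquireFullScreenExclusiveModeEXT( swapchain );  // enter exclusive full-screen
  // ... render and present ...
  device.releaseFullScreenExclusiveModeEXT( swapchain );  // give it back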
+
+ //=== VK_EXT_headless_surface ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Device * pDevice,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
- reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDevice *>( pDevice ) ) );
+ return static_cast<Result>(
+ d.vkCreateHeadlessSurfaceEXT( m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
- PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Device device;
- Result result = static_cast<Result>(
- d.vkCreateDevice( m_physicalDevice,
- reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDevice *>( &device ) ) );
- return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateHeadlessSurfaceEXT( m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
- PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::Device device;
- Result result = static_cast<Result>(
- d.vkCreateDevice( m_physicalDevice,
- reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDevice *>( &device ) ) );
- ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>(
- result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateHeadlessSurfaceEXT( m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
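[editor's note] VK_EXT_headless_surface yields a vk::SurfaceKHR without any window system, which is handy for CI and off-screen swapchain testing. A minimal sketch, assuming an `instance` created with the extension enabled:

  vk::UniqueSurfaceKHR surface = instance.createHeadlessSurfaceEXTUnique( vk::HeadlessSurfaceCreateInfoEXT{} );
  // behaves like any other surface; destroyed automatically when `surface` goes out of scope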
+ //=== VK_KHR_buffer_device_address ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
+ const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkCreateDisplayModeKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
- PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayModeCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
- Result result = static_cast<Result>(
- d.vkCreateDisplayModeKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
- return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+ return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
- PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const DisplayModeCreateInfoKHR & createInfo,
- Optional<const AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
+ const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
- Result result = static_cast<Result>(
- d.vkCreateDisplayModeKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
- ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>(
- result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
+ return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
+ reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+ return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
+ reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ExtensionPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t
+ Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+ m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
- template <typename ExtensionPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
- ExtensionPropertiesAllocator & extensionPropertiesAllocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
+ const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+ m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
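  // A minimal usage sketch of the VK_KHR_buffer_device_address wrappers above, assuming valid
  // handles `device`, `buffer` and `memory`, a dispatcher with the extension's entry points
  // loaded, and a buffer created with vk::BufferUsageFlagBits::eShaderDeviceAddress (the opaque
  // capture addresses additionally need the bufferDeviceAddressCaptureReplay feature):
  //
  //   vk::DeviceAddress address = device.getBufferAddressKHR( vk::BufferDeviceAddressInfo( buffer ) );
  //   uint64_t bufferCapture    = device.getBufferOpaqueCaptureAddressKHR( vk::BufferDeviceAddressInfo( buffer ) );
  //   uint64_t memoryCapture    = device.getMemoryOpaqueCaptureAddressKHR( vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );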
+ //=== VK_EXT_line_rasterization ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
+ uint16_t lineStipplePattern,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+ d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
}
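  // A minimal usage sketch, assuming a command buffer `commandBuffer` in the recording state and a
  // bound pipeline that declares vk::DynamicState::eLineStippleEXT:
  //
  //   commandBuffer.setLineStippleEXT( /*lineStippleFactor=*/1, /*lineStipplePattern=*/0xAAAA );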
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename LayerPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
- {
- std::vector<LayerProperties, LayerPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
- }
+ //=== VK_EXT_host_query_reset ===
- template <typename LayerPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
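  // A minimal usage sketch, assuming a device with the hostQueryReset feature enabled and a valid
  // `queryPool` whose affected queries are not in flight (`queryPoolSize` is a placeholder for the
  // pool's query count):
  //
  //   device.resetQueryPoolEXT( queryPool, /*firstQuery=*/0, /*queryCount=*/queryPoolSize );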
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_extended_dynamic_state ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- uint32_t * pCounterCount,
- VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- pCounterCount,
- reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
+ d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Allocator, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions;
- uint32_t counterCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- counters.size(),
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
- {
- counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- counters.size(),
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- }
- } while ( result == Result::eIncomplete );
- if ( result == Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
- counterDescriptions.resize( counterCount );
- }
- return createResultValue( result,
- counterDescriptions,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
}
- template <
- typename Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
- Allocator const & vectorAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions( vectorAllocator );
- uint32_t counterCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- counters.size(),
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
- {
- counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- counters.size(),
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- }
- } while ( result == Result::eIncomplete );
- if ( result == Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
- counterDescriptions.resize( counterCount );
- }
- return createResultValue( result,
- counterDescriptions,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
- template <typename PerformanceCounterKHRAllocator,
- typename PerformanceCounterDescriptionKHRAllocator,
- typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
- data;
- std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
- data.second;
- uint32_t counterCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
- {
- counters.resize( counterCount );
- counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- &counterCount,
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- VULKAN_HPP_ASSERT( counterCount <= counters.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
- {
- counters.resize( counterCount );
- counterDescriptions.resize( counterCount );
- }
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ d.vkCmdSetViewportWithCountEXT(
+ m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
- template <typename PerformanceCounterKHRAllocator,
- typename PerformanceCounterDescriptionKHRAllocator,
- typename Dispatch,
- typename B1,
- typename B2,
- typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
- std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
- PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
- Dispatch const & d ) const
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
- data( std::piecewise_construct,
- std::forward_as_tuple( performanceCounterKHRAllocator ),
- std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
- std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
- data.second;
- uint32_t counterCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
- {
- counters.resize( counterCount );
- counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- &counterCount,
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- VULKAN_HPP_ASSERT( counterCount <= counters.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
- {
- counters.resize( counterCount );
- counterDescriptions.resize( counterCount );
- }
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ d.vkCmdSetViewportWithCountEXT(
+ m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- pPropertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
+ d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
- {
- std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
- }
-
- template <typename DisplayModeProperties2KHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayModeProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties(
- displayModeProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+ d.vkCmdSetScissorWithCountEXT(
+ m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- pPropertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
+ d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
+ firstBinding,
+ bindingCount,
+ reinterpret_cast<const VkBuffer *>( pBuffers ),
+ reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+ reinterpret_cast<const VkDeviceSize *>( pSizes ),
+ reinterpret_cast<const VkDeviceSize *>( pStrides ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+ VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+ VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
+# else
+ if ( buffers.size() != offsets.size() )
{
- properties.resize( propertyCount );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
- }
-
- template <typename DisplayModePropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties(
- displayModePropertiesKHRAllocator );
- uint32_t propertyCount;
- Result result;
- do
+ if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+ }
+ if ( !strides.empty() && buffers.size() != strides.size() )
{
- properties.resize( propertyCount );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
+ firstBinding,
+ buffers.size(),
+ reinterpret_cast<const VkBuffer *>( buffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+ reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
+ reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
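  // A minimal usage sketch of the ArrayProxy overload above, assuming valid handles
  // `commandBuffer`, `positionBuffer` and `normalBuffer`. `buffers` and `offsets` must have the
  // same length; `sizes` and `strides` may each be empty or match buffers.size(), otherwise this
  // overload throws vk::LogicError (or asserts when exceptions are disabled):
  //
  //   std::array<vk::Buffer, 2>     buffers = { positionBuffer, normalBuffer };
  //   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
  //   commandBuffer.bindVertexBuffers2EXT( /*firstBinding=*/0, buffers, offsets, {}, {} );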
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
- reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
- reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
+ d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
- PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
- reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
- reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+ d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
- static_cast<VkDisplayModeKHR>( mode ),
- planeIndex,
- reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+ d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
- PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
- static_cast<VkDisplayModeKHR>( mode ),
- planeIndex,
- reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+ d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- uint32_t * pDisplayCount,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
+ d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ VULKAN_HPP_NAMESPACE::StencilOp failOp,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<DisplayKHR, DisplayKHRAllocator> displays;
- uint32_t displayCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && displayCount )
- {
- displays.resize( displayCount );
- result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
- VULKAN_HPP_ASSERT( displayCount <= displays.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
- {
- displays.resize( displayCount );
- }
- return createResultValue(
- result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ d.vkCmdSetStencilOpEXT( m_commandBuffer,
+ static_cast<VkStencilFaceFlags>( faceMask ),
+ static_cast<VkStencilOp>( failOp ),
+ static_cast<VkStencilOp>( passOp ),
+ static_cast<VkStencilOp>( depthFailOp ),
+ static_cast<VkCompareOp>( compareOp ) );
}
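  // A minimal usage sketch of the VK_EXT_extended_dynamic_state setters above, assuming a command
  // buffer `commandBuffer` in the recording state, a bound pipeline that declares the matching
  // dynamic states, and `viewport` / `scissor` values (a single element binds through ArrayProxy):
  //
  //   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
  //   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  //   commandBuffer.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );
  //   commandBuffer.setDepthTestEnableEXT( VK_TRUE );
  //   commandBuffer.setDepthWriteEnableEXT( VK_TRUE );
  //   commandBuffer.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );
  //   commandBuffer.setViewportWithCountEXT( viewport );
  //   commandBuffer.setScissorWithCountEXT( scissor );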
- template <typename DisplayKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- DisplayKHRAllocator & displayKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
- uint32_t displayCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && displayCount )
- {
- displays.resize( displayCount );
- result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
- VULKAN_HPP_ASSERT( displayCount <= displays.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
- {
- displays.resize( displayCount );
- }
- return createResultValue(
- result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_deferred_host_operations ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
- VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
+ return static_cast<Result>(
+ d.vkCreateDeferredOperationKHR( m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename TimeDomainEXTAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
+ Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
- std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
- uint32_t timeDomainCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && timeDomainCount )
- {
- timeDomains.resize( timeDomainCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
- VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
- {
- timeDomains.resize( timeDomainCount );
- }
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+ Result result = static_cast<Result>(
+ d.vkCreateDeferredOperationKHR( m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
return createResultValue(
- result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
}
- template <typename TimeDomainEXTAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
- Dispatch const & d ) const
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
+ Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
- std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
- uint32_t timeDomainCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && timeDomainCount )
- {
- timeDomains.resize( timeDomainCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
- VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
- {
- timeDomains.resize( timeDomainCount );
- }
- return createResultValue(
- result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+ Result result = static_cast<Result>(
+ d.vkCreateDeferredOperationKHR( m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
+ result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
+ d.vkDestroyDeferredOperationKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
- PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice,
- &propertyCount,
- reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ d.vkDestroyDeferredOperationKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <
- typename CooperativeMatrixPropertiesNVAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
- PhysicalDevice::getCooperativeMatrixPropertiesNV(
- CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
- cooperativeMatrixPropertiesNVAllocator );
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice,
- &propertyCount,
- reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ d.vkDestroyDeferredOperationKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
- uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+ d.vkDestroyDeferredOperationKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
- uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+ return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
{
- std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ Result result = static_cast<Result>(
+ d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename DisplayPlaneProperties2KHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneProperties2KHR(
- DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties(
- displayPlaneProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
- {
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
- {
- properties.resize( propertyCount );
- }
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ return static_cast<Result>(
+ d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+ {
+ Result result =
+ static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
+ VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
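  // A minimal usage sketch of the VK_KHR_deferred_host_operations wrappers above, assuming a valid
  // `device` with the extension enabled and some deferrable driver call (e.g. a ray-tracing
  // pipeline or acceleration-structure build) that accepts the operation handle:
  //
  //   vk::DeferredOperationKHR op = device.createDeferredOperationKHR();
  //   // ... pass `op` to a deferrable command; if it returns eOperationDeferredKHR, help it finish:
  //   vk::Result join;
  //   do
  //   {
  //     join = device.deferredOperationJoinKHR( op );   // eSuccess, eThreadDoneKHR or eThreadIdleKHR
  //   } while ( join == vk::Result::eThreadIdleKHR );
  //   vk::Result status = device.getDeferredOperationResultKHR( op );   // eSuccess or eNotReady
  //   device.destroyDeferredOperationKHR( op );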
+ //=== VK_KHR_pipeline_executable_properties ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
+ uint32_t * pExecutableCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPipelineExecutablePropertiesKHR( m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
+ pExecutableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
+ template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
{
- std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
+ uint32_t executableCount;
+ Result result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+ m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && executableCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( executableCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+ &executableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( executableCount <= properties.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
{
- properties.resize( propertyCount );
+ properties.resize( executableCount );
}
return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
}
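  // A minimal usage sketch, assuming a valid `device` with the pipelineExecutableInfo feature
  // enabled and a `pipeline` created with vk::PipelineCreateFlagBits::eCaptureStatisticsKHR:
  //
  //   auto properties = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
  //   for ( uint32_t i = 0; i < properties.size(); ++i )
  //   {
  //     auto statistics = device.getPipelineExecutableStatisticsKHR( vk::PipelineExecutableInfoKHR( pipeline, i ) );
  //     // each vk::PipelineExecutableStatisticKHR carries a name, description, format and value
  //   }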
- template <typename DisplayPlanePropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPlanePropertiesKHR(
- DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
+ template <
+ typename PipelineExecutablePropertiesKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ Device::getPipelineExecutablePropertiesKHR(
+ const PipelineInfoKHR & pipelineInfo,
+ PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties(
- displayPlanePropertiesKHRAllocator );
- uint32_t propertyCount;
+ std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
+ pipelineExecutablePropertiesKHRAllocator );
+ uint32_t executableCount;
Result result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+ m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && executableCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ properties.resize( executableCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+ &executableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+ VULKAN_HPP_ASSERT( executableCount <= properties.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
{
- properties.resize( propertyCount );
+ properties.resize( executableCount );
}
return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
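[Editor's note, not part of the patch] A minimal usage sketch of the enhanced-mode wrapper above, which hides the usual VK_INCOMPLETE retry loop behind a single call. The names `device` and `pipeline` are assumed to be an existing vk::Device and vk::Pipeline, with VK_KHR_pipeline_executable_properties enabled on the device.

// Sketch only: enumerate the executables compiled for an existing pipeline.
vk::PipelineInfoKHR pipelineInfo( pipeline );
std::vector<vk::PipelineExecutablePropertiesKHR> executables =
  device.getPipelineExecutablePropertiesKHR( pipelineInfo );
for ( auto const & e : executables )
{
  // e.name, e.stages and e.subgroupSize describe one executable of the pipeline
}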
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pStatisticCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPipelineExecutableStatisticsKHR( m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+ pStatisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayProperties2KHRAllocator, typename Dispatch>
+ template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
+ typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
+ Dispatch const & d ) const
{
- std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+ std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
+ uint32_t statisticCount;
+ Result result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && statisticCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ statistics.resize( statisticCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+ VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
{
- properties.resize( propertyCount );
+ statistics.resize( statisticCount );
}
return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
}
- template <typename DisplayProperties2KHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
+ template <
+ typename PipelineExecutableStatisticKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ Device::getPipelineExecutableStatisticsKHR(
+ const PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
+ std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
+ pipelineExecutableStatisticKHRAllocator );
+ uint32_t statisticCount;
+ Result result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && statisticCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ statistics.resize( statisticCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+ VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
{
- properties.resize( propertyCount );
+ statistics.resize( statisticCount );
}
return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
+ const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pInternalRepresentationCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+ pInternalRepresentationCount,
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
+ template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
+ typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
+ PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
+ Dispatch const & d ) const
{
- std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+ std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+ internalRepresentations;
+ uint32_t internalRepresentationCount;
+ Result result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &internalRepresentationCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && internalRepresentationCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ internalRepresentations.resize( internalRepresentationCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &internalRepresentationCount,
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+ VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
{
- properties.resize( propertyCount );
+ internalRepresentations.resize( internalRepresentationCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ return createResultValue( result,
+ internalRepresentations,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
}
- template <typename DisplayPropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
+ template <
+ typename PipelineExecutableInternalRepresentationKHRAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
+ int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
+ PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ Device::getPipelineExecutableInternalRepresentationsKHR(
+ const PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
+ Dispatch const & d ) const
{
- std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
- uint32_t propertyCount;
- Result result;
+ std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+ internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
+ uint32_t internalRepresentationCount;
+ Result result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &internalRepresentationCount,
+ nullptr ) );
+ if ( ( result == Result::eSuccess ) && internalRepresentationCount )
{
- properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ internalRepresentations.resize( internalRepresentationCount );
+ result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &internalRepresentationCount,
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+ VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
}
} while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+ if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
{
- properties.resize( propertyCount );
+ internalRepresentations.resize( internalRepresentationCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ return createResultValue( result,
+ internalRepresentations,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
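[Editor's note, not part of the patch] A similarly hedged sketch for the two per-executable queries wrapped above; `device` and `pipeline` as before, and executable index 0 is only an example. Statistics are only captured for pipelines created with vk::PipelineCreateFlagBits::eCaptureStatisticsKHR, internal representations with eCaptureInternalRepresentationsKHR.

// Sketch only: query statistics and internal representations of one executable.
vk::PipelineExecutableInfoKHR executableInfo( pipeline, 0 );
std::vector<vk::PipelineExecutableStatisticKHR> statistics =
  device.getPipelineExecutableStatisticsKHR( executableInfo );
std::vector<vk::PipelineExecutableInternalRepresentationKHR> internalRepresentations =
  device.getPipelineExecutableInternalRepresentationsKHR( executableInfo );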
- template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetPhysicalDeviceExternalBufferProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
- }
+ //=== VK_NV_device_generated_commands ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- d.vkGetPhysicalDeviceExternalBufferProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
- return externalBufferProperties;
+ d.vkGetGeneratedCommandsMemoryRequirementsNV(
+ m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+ d.vkGetGeneratedCommandsMemoryRequirementsNV(
+ m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return memoryRequirements;
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
- return externalBufferProperties;
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
+ structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetGeneratedCommandsMemoryRequirementsNV(
+ m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
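[Editor's note, not part of the patch] A sketch of sizing the preprocess buffer for VK_NV_device_generated_commands with the wrapper above; `pipeline`, `indirectCommandsLayout` and `maxSequencesCount` are assumed to exist already and are not defined by this patch.

// Sketch only: query memory requirements for generated-commands preprocessing.
vk::GeneratedCommandsMemoryRequirementsInfoNV info( vk::PipelineBindPoint::eGraphics,
                                                    pipeline,
                                                    indirectCommandsLayout,
                                                    maxSequencesCount );
vk::MemoryRequirements2 requirements = device.getGeneratedCommandsMemoryRequirementsNV( info );
// requirements.memoryRequirements.size is the space to allocate for the preprocess buffer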
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceExternalFenceProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+ d.vkCmdPreprocessGeneratedCommandsNV(
+ m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- d.vkGetPhysicalDeviceExternalFenceProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
- return externalFenceProperties;
+ d.vkCmdPreprocessGeneratedCommandsNV(
+ m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
+ VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+ d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
+ static_cast<VkBool32>( isPreprocessed ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const GeneratedCommandsInfoNV & generatedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
- return externalFenceProperties;
+ d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
+ static_cast<VkBool32>( isPreprocessed ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
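[Editor's note, not part of the patch] Recording then follows the preprocess/execute pair wrapped above. This assumes a fully populated vk::GeneratedCommandsInfoNV `generatedCommandsInfo` (token streams and preprocess buffer omitted here) and a vk::CommandBuffer `commandBuffer`.

// Sketch only: explicit preprocessing is recorded outside a render pass ...
commandBuffer.preprocessGeneratedCommandsNV( generatedCommandsInfo );
// ... and execution where the generated state would normally be bound.
commandBuffer.executeGeneratedCommandsNV( VK_TRUE /* isPreprocessed */, generatedCommandsInfo );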
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t groupIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
- reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
+ d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipeline>( pipeline ),
+ groupIndex );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
- PhysicalDevice::getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
- reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
- return createResultValue( result,
- externalImageFormatProperties,
- VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+ return static_cast<Result>(
+ d.vkCreateIndirectCommandsLayoutNV( m_device,
+ reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
+ Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- d.vkGetPhysicalDeviceExternalSemaphoreProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+ m_device,
+ reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+ return createResultValue(
+ result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
+ Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
- d.vkGetPhysicalDeviceExternalSemaphoreProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
- return externalSemaphoreProperties;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+ Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+ m_device,
+ reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
+ result,
+ indirectCommandsLayout,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
+ deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device,
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphorePropertiesKHR(
- const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
- d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
- return externalSemaphoreProperties;
+ d.vkDestroyIndirectCommandsLayoutNV(
+ m_device,
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+ d.vkDestroyIndirectCommandsLayoutNV( m_device,
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
- PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
- d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
- return features;
+ d.vkDestroyIndirectCommandsLayoutNV(
+ m_device,
+ static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_private_data ===
+
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+ return static_cast<Result>(
+ d.vkCreatePrivateDataSlotEXT( m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type
+ Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
- d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
- return features;
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+ Result result = static_cast<Result>(
+ d.vkCreatePrivateDataSlotEXT( m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+ return createResultValue(
+ result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
+ Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
- d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
- return structureChain;
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+ Result result = static_cast<Result>(
+ d.vkCreatePrivateDataSlotEXT( m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>(
+ result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+ d.vkDestroyPrivateDataSlotEXT( m_device,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
- d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
- return features;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
- d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
- return structureChain;
+ d.vkDestroyPrivateDataSlotEXT( m_device,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFormatProperties(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+ d.vkDestroyPrivateDataSlotEXT( m_device,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
- PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
- d.vkGetPhysicalDeviceFormatProperties(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
- return formatProperties;
+ d.vkDestroyPrivateDataSlotEXT( m_device,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+ return static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
+ static_cast<VkObjectType>( objectType ),
+ objectHandle,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ data ) );
}
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
- return formatProperties;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
- return structureChain;
+ Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
+ static_cast<VkObjectType>( objectType ),
+ objectHandle,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ data ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ uint64_t * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceFormatProperties2KHR(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+ d.vkGetPrivateDataEXT( m_device,
+ static_cast<VkObjectType>( objectType ),
+ objectHandle,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
- d.vkGetPhysicalDeviceFormatProperties2KHR(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
- return formatProperties;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+ Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
- d.vkGetPhysicalDeviceFormatProperties2KHR(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
- return structureChain;
+ uint64_t data;
+ d.vkGetPrivateDataEXT( m_device,
+ static_cast<VkObjectType>( objectType ),
+ objectHandle,
+ static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+ &data );
+ return data;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
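[Editor's note, not part of the patch] The VK_EXT_private_data wrappers above compose roughly as in this sketch; names are illustrative, the privateData feature must be enabled on `device`, and `image` is some existing vk::Image.

// Sketch only: create a slot, attach a value to an image, read it back.
vk::UniquePrivateDataSlotEXT slot =
  device.createPrivateDataSlotEXTUnique( vk::PrivateDataSlotCreateInfoEXT{} );
uint64_t objectHandle = uint64_t( static_cast<VkImage>( image ) );  // raw handle as uint64_t
device.setPrivateDataEXT( vk::ObjectType::eImage, objectHandle, slot.get(), 42u );
uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eImage, objectHandle, slot.get() );
// value == 42; the slot is destroyed automatically when `slot` goes out of scope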
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_encode_queue ===
+
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
- uint32_t * pFragmentShadingRateCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- pFragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
+ d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
- fragmentShadingRates;
- uint32_t fragmentShadingRateCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- &fragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
- VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- }
- return createResultValue(
- result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- PhysicalDevice::getFragmentShadingRatesKHR(
- PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
- fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
- uint32_t fragmentShadingRateCount;
- Result result;
- do
- {
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- &fragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
- VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- }
- return createResultValue(
- result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_synchronization2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+ d.vkCmdSetEvent2KHR( m_commandBuffer,
+ static_cast<VkEvent>( event ),
+ reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
- PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ const DependencyInfoKHR & dependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+ d.vkCmdSetEvent2KHR( m_commandBuffer,
+ static_cast<VkEvent>( event ),
+ reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
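[Editor's note, not part of the patch] A hedged sketch of the setEvent2KHR wrapper above, using a single global memory barrier; `commandBuffer` and `event` are assumed to exist and the synchronization2 feature to be enabled.

// Sketch only: signal an event with an attached dependency.
vk::MemoryBarrier2KHR barrier( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput,
                               vk::AccessFlagBits2KHR::eColorAttachmentWrite,
                               vk::PipelineStageFlagBits2KHR::eFragmentShader,
                               vk::AccessFlagBits2KHR::eShaderRead );
vk::DependencyInfoKHR dependencyInfo( {}, 1, &barrier );
commandBuffer.setEvent2KHR( event, dependencyInfo );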
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+ d.vkCmdResetEvent2KHR(
+ m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::waitEvents2KHR( uint32_t eventCount,
+ const VULKAN_HPP_NAMESPACE::Event * pEvents,
+ const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+ d.vkCmdWaitEvents2KHR( m_commandBuffer,
+ eventCount,
+ reinterpret_cast<const VkEvent *>( pEvents ),
+ reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
- }
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+# else
+ if ( events.size() != dependencyInfos.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d ) const
- {
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+ d.vkCmdWaitEvents2KHR( m_commandBuffer,
+ events.size(),
+ reinterpret_cast<const VkEvent *>( events.data() ),
+ reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
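
Illustrative sketch, not part of this diff: one way the new VK_KHR_synchronization2 event wrappers above might be called, assuming the default vk namespace, an appropriately initialized dispatcher, and pre-existing cb (vk::CommandBuffer) and evt (vk::Event) handles; the helper name is hypothetical.

    #include <vulkan/vulkan.hpp>

    // Records a signal on `evt` followed by a wait on it, guarded by a single
    // global memory barrier, using the reference/ArrayProxy overloads above.
    void recordSignalAndWait( vk::CommandBuffer cb, vk::Event evt )
    {
      vk::MemoryBarrier2KHR barrier;
      barrier.srcStageMask  = vk::PipelineStageFlagBits2KHR::eAllCommands;
      barrier.srcAccessMask = vk::AccessFlagBits2KHR::eMemoryWrite;
      barrier.dstStageMask  = vk::PipelineStageFlagBits2KHR::eAllCommands;
      barrier.dstAccessMask = vk::AccessFlagBits2KHR::eMemoryRead;

      vk::DependencyInfoKHR depInfo;
      depInfo.memoryBarrierCount = 1;
      depInfo.pMemoryBarriers    = &barrier;

      cb.setEvent2KHR( evt, depInfo );    // reference overload
      cb.waitEvents2KHR( evt, depInfo );  // ArrayProxy overload: counts must match
    }
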
template <typename Dispatch>
VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+ d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
- return memoryProperties;
+ d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkCmdWriteTimestamp2KHR(
+ m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Queue::submit2KHR( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
- return memoryProperties;
+ return static_cast<Result>( d.vkQueueSubmit2KHR(
+ m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
- return structureChain;
+ Result result =
+ static_cast<Result>( d.vkQueueSubmit2KHR( m_queue,
+ submits.size(),
+ reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ),
+ static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
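
Illustrative sketch, not part of this diff: submitting one command buffer through the new Queue::submit2KHR ArrayProxy overload above; the helper name and handles are hypothetical.

    #include <vulkan/vulkan.hpp>

    // Wraps `cb` in a CommandBufferSubmitInfoKHR and submits it with `fence`.
    // The enhanced-mode overload throws on failure (or returns a Result when
    // VULKAN_HPP_NO_EXCEPTIONS is defined).
    void submitOne( vk::Queue queue, vk::CommandBuffer cb, vk::Fence fence )
    {
      vk::CommandBufferSubmitInfoKHR cbInfo;
      cbInfo.commandBuffer = cb;

      vk::SubmitInfo2KHR submit;
      submit.commandBufferInfoCount = 1;
      submit.pCommandBufferInfos    = &cbInfo;

      queue.submit2KHR( submit, fence );
    }
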
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ uint32_t marker,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkCmdWriteBufferMarker2AMD( m_commandBuffer,
+ static_cast<VkPipelineStageFlags2KHR>( stage ),
+ static_cast<VkBuffer>( dstBuffer ),
+ static_cast<VkDeviceSize>( dstOffset ),
+ marker );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
+ VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
- return memoryProperties;
+ d.vkGetQueueCheckpointData2NV(
+ m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CheckpointData2NVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ Queue::getCheckpointData2NV( Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
- return structureChain;
+ std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointData2NV(
+ m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ template <typename CheckpointData2NVAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
{
- d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
- m_physicalDevice,
- static_cast<VkSampleCountFlagBits>( samples ),
- reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+ std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointData2NV(
+ m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
}
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_NV_fragment_shading_rate_enums ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
- PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV(
+ VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
- d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
- m_physicalDevice,
- static_cast<VkSampleCountFlagBits>( samples ),
- reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
- return multisampleProperties;
+ d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer,
+ static_cast<VkFragmentShadingRateNV>( shadingRate ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_KHR_copy_commands2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pRectCount,
- VULKAN_HPP_NAMESPACE::Rect2D * pRects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
+ d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Rect2DAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
- {
- std::vector<Rect2D, Rect2DAllocator> rects;
- uint32_t rectCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && rectCount )
- {
- rects.resize( rectCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- reinterpret_cast<VkRect2D *>( rects.data() ) ) );
- VULKAN_HPP_ASSERT( rectCount <= rects.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
- {
- rects.resize( rectCount );
- }
- return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
- }
-
- template <typename Rect2DAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Rect2DAllocator & rect2DAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
- uint32_t rectCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && rectCount )
- {
- rects.resize( rectCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- reinterpret_cast<VkRect2D *>( rects.data() ) ) );
- VULKAN_HPP_ASSERT( rectCount <= rects.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
- {
- rects.resize( rectCount );
- }
- return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+ d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
- PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
- d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
- return properties;
+ d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
+ const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
- return properties;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
- return structureChain;
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
+ const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
- return properties;
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
- return structureChain;
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
+ reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
- uint32_t * pNumPasses,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
- pNumPasses );
+ d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
- const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- uint32_t numPasses;
- d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
- &numPasses );
- return numPasses;
+ d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+ d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
- PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
- {
- std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
- }
-
- template <typename QueueFamilyPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
- PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties(
- queueFamilyPropertiesAllocator );
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
+ d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
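
Illustrative sketch, not part of this diff: recording a buffer-to-buffer copy with the VK_KHR_copy_commands2 wrappers above; the helper name, handles, and offsets are hypothetical.

    #include <vulkan/vulkan.hpp>

    // Copies `size` bytes from the start of `src` to the start of `dst`
    // using the reference overload of CommandBuffer::copyBuffer2KHR above.
    void recordCopy( vk::CommandBuffer cb, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
    {
      vk::BufferCopy2KHR region;
      region.srcOffset = 0;
      region.dstOffset = 0;
      region.size      = size;

      vk::CopyBufferInfo2KHR copyInfo;
      copyInfo.srcBuffer   = src;
      copyInfo.dstBuffer   = dst;
      copyInfo.regionCount = 1;
      copyInfo.pRegions    = &region;

      cb.copyBuffer2KHR( copyInfo );
    }
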
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
- }
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_acquire_winrt_display ===
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
+ VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
+ return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
}
-
- template <typename QueueFamilyProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d ) const
+# else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
- queueFamilyProperties2Allocator );
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
+ Result result =
+ static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
- }
- d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
- }
- return returnVector;
+ return static_cast<Result>(
+ d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
- template <typename StructureChain,
- typename StructureChainAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d ) const
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
{
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
- structureChainAllocator );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
- }
- d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
- }
- return returnVector;
+ VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ Result result = static_cast<Result>(
+ d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+ return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
{
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+ VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ Result result = static_cast<Result>(
+ d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+ ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
+ result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
- {
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
- }
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
- template <typename QueueFamilyProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
- queueFamilyProperties2Allocator );
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- return queueFamilyProperties;
+ return static_cast<Result>(
+ d.vkCreateDirectFBSurfaceEXT( m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
- template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
- }
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
- }
- return returnVector;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateDirectFBSurfaceEXT( m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
}
- template <typename StructureChain,
- typename StructureChainAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d ) const
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- uint32_t queueFamilyPropertyCount;
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
- structureChainAllocator );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
- }
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
- for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
- {
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
- }
- return returnVector;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ Result result = static_cast<Result>(
+ d.vkCreateDirectFBSurfaceEXT( m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
- uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
+ uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Bool32>(
- d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
+ d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
- uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
+ uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
+ return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- Dispatch const & d ) const
- {
- std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
- }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- template <
- typename SparseImageFormatPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- PhysicalDevice::getSparseImageFormatProperties(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
- Dispatch const & d ) const
- {
- std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties(
- sparseImageFormatPropertiesAllocator );
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_ray_tracing_pipeline ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+ d.vkCmdTraceRaysKHR( m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
+ width,
+ height,
+ depth );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- Dispatch const & d ) const
- {
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
- }
-
- template <
- typename SparseImageFormatProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2(
- const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
- sparseImageFormatProperties2Allocator );
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
+ d.vkCmdTraceRaysKHR( m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+ width,
+ height,
+ depth );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
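
Illustrative sketch, not part of this diff: dispatching a ray-tracing launch with the reference overload of CommandBuffer::traceRaysKHR above; it assumes the shader-binding-table regions were filled in elsewhere (e.g. from shader group handles), and the helper name is hypothetical.

    #include <vulkan/vulkan.hpp>

    // Launches a width x height x 1 ray grid with pre-built SBT regions.
    void recordTrace( vk::CommandBuffer                         cb,
                      vk::StridedDeviceAddressRegionKHR const & raygen,
                      vk::StridedDeviceAddressRegionKHR const & miss,
                      vk::StridedDeviceAddressRegionKHR const & hit,
                      vk::StridedDeviceAddressRegionKHR const & callable,
                      uint32_t                                  width,
                      uint32_t                                  height )
    {
      cb.traceRaysKHR( raygen, miss, hit, callable, width, height, 1 );
    }
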
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+ return static_cast<Result>(
+ d.vkCreateRayTracingPipelinesKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- Dispatch const & d ) const
+ template <typename PipelineAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue( result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
- template <
- typename SparseImageFormatProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2KHR(
- const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d ) const
+ template <typename PipelineAllocator,
+ typename Dispatch,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
+ Device::createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
- sparseImageFormatProperties2Allocator );
- uint32_t propertyCount;
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- return properties;
+ std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ return createResultValue( result,
+ pipelines,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
- uint32_t * pCombinationCount,
- VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
+ Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- pCombinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ return createResultValue( result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch, typename PipelineAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesKHRUnique(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
- uint32_t combinationCount;
- Result result;
- do
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice, &combinationCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && combinationCount )
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
{
- combinations.resize( combinationCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- &combinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
- VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
- {
- combinations.resize( combinationCount );
}
return createResultValue( result,
- combinations,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
- template <typename FramebufferMixedSamplesCombinationNVAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
- FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
- Dispatch const & d ) const
+ template <
+ typename Dispatch,
+ typename PipelineAllocator,
+ typename B,
+ typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesKHRUnique(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
- framebufferMixedSamplesCombinationNVAllocator );
- uint32_t combinationCount;
- Result result;
- do
+ std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ std::vector<Pipeline> pipelines( createInfos.size() );
+ Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice, &combinationCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && combinationCount )
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( size_t i = 0; i < createInfos.size(); i++ )
{
- combinations.resize( combinationCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- &combinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
- VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
- {
- combinations.resize( combinationCount );
}
return createResultValue( result,
- combinations,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ std::move( uniquePipelines ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
+ Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+ Pipeline pipeline;
+ Result result = static_cast<Result>(
+ d.vkCreateRayTracingPipelinesKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) ) );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Pipeline, Dispatch>( result,
+ pipeline,
+ VULKAN_HPP_NAMESPACE_STRING
+ "::Device::createRayTracingPipelineKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
+ deleter );
}
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
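Because ray-tracing pipeline creation has several success codes (eOperationDeferredKHR, eOperationNotDeferredKHR, ePipelineCompileRequiredEXT), the wrappers above return a ResultValue rather than throwing on anything but eSuccess. A hedged sketch of the single-pipeline flavour; device, pipelineCache and the createInfo contents are hypothetical placeholders:

  vk::RayTracingPipelineCreateInfoKHR createInfo;  // stages, groups and layout assumed to be filled in elsewhere
  auto rv = device.createRayTracingPipelineKHR( nullptr /* no deferred operation */, pipelineCache, createInfo );
  if ( rv.result == vk::Result::eSuccess )
  {
    vk::Pipeline rtPipeline = rv.value;
    // ... bind and use rtPipeline ...
  }
  // With a real DeferredOperationKHR, rv.result may be eOperationDeferredKHR and the pipeline
  // only becomes usable once that deferred operation has completed.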
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
- PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+ return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+ typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ ArrayProxy<T> const & data,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
+ Result result =
+ static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
- PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
+ Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+ Result result =
+ static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
}
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
+ Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ T data;
+ Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ) ) );
+ return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
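The ArrayProxy flavour above is tagged VULKAN_HPP_DEPRECATED, so a usage sketch would typically go through the vector-returning flavour instead; rtPipeline, groupCount and handleSize (e.g. from PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize) are hypothetical placeholders:

  // Fetch all group handles as raw bytes for building the shader binding table.
  std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
    rtPipeline, 0, groupCount, size_t( groupCount ) * handleSize );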
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+ return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
- PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ ArrayProxy<T> const & data,
+ Dispatch const & d ) const
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- pSurfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
+ Result result = static_cast<Result>(
+ d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
+ return createResultValue( result,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
}
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
- PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+ template <typename T, typename Allocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
+ Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ Dispatch const & d ) const
{
- std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
- uint32_t surfaceFormatCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
- {
- surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
+ VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+ std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+ Result result = static_cast<Result>(
+ d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( T ),
+ reinterpret_cast<void *>( data.data() ) ) );
return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
}
- template <typename SurfaceFormat2KHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
- PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
- Dispatch const & d ) const
+ template <typename T, typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
+ Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ Dispatch const & d ) const
{
- std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
- uint32_t surfaceFormatCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
- {
- surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
+ T data;
+ Result result =
+ static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( T ),
+ reinterpret_cast<void *>( &data ) ) );
return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
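The capture-replay variant mirrors the call above; it is only meaningful when the application has enabled the corresponding capture-replay pipeline feature, which this diff does not show. captureReplayHandleSize is a hypothetical placeholder:

  std::vector<uint8_t> replayHandles = device.getRayTracingCaptureReplayShaderGroupHandlesKHR<uint8_t>(
    rtPipeline, 0, groupCount, size_t( groupCount ) * captureReplayHandleSize );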
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- pSurfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
+ d.vkCmdTraceRaysIndirectKHR(
+ m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
+ static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SurfaceFormatKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
- {
- std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
- uint32_t surfaceFormatCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
- {
- surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
- }
-
- template <typename SurfaceFormatKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
- uint32_t surfaceFormatCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
- {
- surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ d.vkCmdTraceRaysIndirectKHR(
+ m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+ static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
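A sketch of the indirect variant, assuming indirectBuffer is a device-address-capable buffer holding the trace dimensions written by the application or a prior GPU pass; apart from the wrapper calls, every name here is a hypothetical placeholder:

  vk::DeviceAddress indirectAddress = device.getBufferAddress( vk::BufferDeviceAddressInfo( indirectBuffer ) );
  commandBuffer.traceRaysIndirectKHR( raygenRegion, missRegion, hitRegion, callableRegion, indirectAddress );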
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- pPresentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
- }
-
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
- {
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
- uint32_t presentModeCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
- {
- presentModes.resize( presentModeCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
- {
- presentModes.resize( presentModeCount );
- }
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
- }
-
- template <typename PresentModeKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE DeviceSize
+ Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t group,
+ VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
- uint32_t presentModeCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
- {
- presentModes.resize( presentModeCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
- {
- presentModes.resize( presentModeCount );
- }
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- pPresentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
- uint32_t presentModeCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
- {
- presentModes.resize( presentModeCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
- {
- presentModes.resize( presentModeCount );
- }
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
}
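A simplified sketch combining the two wrappers above: query per-group stack sizes, then set an explicit pipeline stack size before tracing. The plain sum below is an illustrative assumption; a real application would apply the pipeline-stack-size formula from the ray tracing specification, and hitGroupIndex is hypothetical:

  vk::DeviceSize raygenStack = device.getRayTracingShaderGroupStackSizeKHR( rtPipeline, 0, vk::ShaderGroupShaderKHR::eGeneral );
  vk::DeviceSize hitStack    = device.getRayTracingShaderGroupStackSizeKHR( rtPipeline, hitGroupIndex, vk::ShaderGroupShaderKHR::eClosestHit );
  commandBuffer.setRayTracingPipelineStackSizeKHR( static_cast<uint32_t>( raygenStack + hitStack ) );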
- template <typename PresentModeKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d ) const
- {
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
- uint32_t presentModeCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
- {
- presentModes.resize( presentModeCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
- {
- presentModes.resize( presentModeCount );
- }
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_vertex_input_dynamic_state ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
+ uint32_t vertexBindingDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+ uint32_t vertexAttributeDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
- queueFamilyIndex,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkBool32 *>( pSupported ) ) );
+ d.vkCmdSetVertexInputEXT(
+ m_commandBuffer,
+ vertexBindingDescriptionCount,
+ reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
+ vertexAttributeDescriptionCount,
+ reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
- PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::Bool32 supported;
- Result result =
- static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
- queueFamilyIndex,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkBool32 *>( &supported ) ) );
- return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+ d.vkCmdSetVertexInputEXT(
+ m_commandBuffer,
+ vertexBindingDescriptions.size(),
+ reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
+ vertexAttributeDescriptions.size(),
+ reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
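A usage sketch of the ArrayProxy overload above; Vertex is a hypothetical application vertex struct, and single elements convert implicitly to ArrayProxy:

  vk::VertexInputBindingDescription2EXT binding;
  binding.binding   = 0;
  binding.stride    = sizeof( Vertex );
  binding.inputRate = vk::VertexInputRate::eVertex;
  binding.divisor   = 1;
  vk::VertexInputAttributeDescription2EXT attribute;
  attribute.location = 0;
  attribute.binding  = 0;
  attribute.format   = vk::Format::eR32G32B32Sfloat;
  attribute.offset   = 0;
  // Requires the bound pipeline to enable the vertex-input dynamic state.
  commandBuffer.setVertexInputEXT( binding, attribute );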
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
- PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
- {
- std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
- uint32_t toolCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
- {
- toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice,
- &toolCount,
- reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
- {
- toolProperties.resize( toolCount );
- }
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
- }
-
- template <
- typename PhysicalDeviceToolPropertiesEXTAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
- PhysicalDevice::getToolPropertiesEXT(
- PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
- {
- std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties(
- physicalDeviceToolPropertiesEXTAllocator );
- uint32_t toolCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
- {
- toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice,
- &toolCount,
- reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
- {
- toolProperties.resize( toolCount );
- }
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_external_memory ===
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
+ m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
- PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
- {
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
- }
-
- template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+ Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
+ Dispatch const & d ) const
{
- StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
- Result result = static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
+ zx_handle_t zirconHandle;
+ Result result = static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
+ m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+ result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
- uint32_t * pVideoFormatPropertyCount,
- VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
- pVideoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ zirconHandle,
+ reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- Dispatch const & d ) const
- {
- std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
- uint32_t videoFormatPropertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
- VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- }
- return createResultValue(
- result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
- }
-
- template <typename VideoFormatPropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
- Dispatch const & d ) const
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
+ Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ Dispatch const & d ) const
{
- std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties(
- videoFormatPropertiesKHRAllocator );
- uint32_t videoFormatPropertyCount;
- Result result;
- do
- {
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
- VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
- }
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- }
- return createResultValue(
- result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
+ Result result = static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
+ m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ zirconHandle,
+ reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
+ return createResultValue( result,
+ memoryZirconHandleProperties,
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
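A hedged sketch of exporting a Zircon VMO handle with the wrappers above; exportedMemory is a hypothetical vk::DeviceMemory assumed to have been allocated with an export structure chained in, which this diff does not show:

#if defined( VK_USE_PLATFORM_FUCHSIA )
  vk::MemoryGetZirconHandleInfoFUCHSIA getInfo;
  getInfo.memory     = exportedMemory;
  getInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA;
  zx_handle_t vmo = device.getMemoryZirconHandleFUCHSIA( getInfo );  // throws vk::SystemError on failure
#endif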
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_external_semaphore ===
-#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
- uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+ return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
+ m_device,
+ reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
- uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::importSemaphoreZirconHandleFUCHSIA(
+ const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const
{
- return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+ Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
+ m_device,
+ reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR(
- uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
- }
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_XCB_KHR )
- template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t * connection,
- xcb_visualid_t visual_id,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA(
+ const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+ return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
+ m_device,
+ reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ),
+ pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t & connection,
- xcb_visualid_t visual_id,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+ Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
+ Dispatch const & d ) const
{
- return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+ zx_handle_t zirconHandle;
+ Result result = static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
+ m_device,
+ reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
+ &zirconHandle ) );
+ return createResultValue(
+ result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+ //=== VK_EXT_extended_dynamic_state2 ===
-#if defined( VK_USE_PLATFORM_XLIB_KHR )
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display * dpy,
- VisualID visualID,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+ d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display & dpy,
- VisualID visualID,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+ d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
- RROutput rrOutput,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
}
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- return createResultValue(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
+ d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
}
-# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
+ d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
}
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
+
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ d.vkCreateScreenSurfaceQNX( m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
+ d.vkCreateScreenSurfaceQNX( m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
+ typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
- VULKAN_HPP_NAMESPACE::DisplayKHR display;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter );
+ d.vkCreateScreenSurfaceQNX( m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>(
+ static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+ ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+ return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+ result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
- }
-#else
- template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
- {
- Result result =
- static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
- VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetQueueCheckpointData2NV(
- m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointData2NVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- Queue::getCheckpointData2NV( Dispatch const & d ) const
- {
- std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
- uint32_t checkpointDataCount;
- d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
- checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointData2NV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
- return checkpointData;
- }
-
- template <typename CheckpointData2NVAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
- {
- std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
- uint32_t checkpointDataCount;
- d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
- checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointData2NV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
- return checkpointData;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
- VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkGetQueueCheckpointDataNV(
- m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointDataNVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- Queue::getCheckpointDataNV( Dispatch const & d ) const
- {
- std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
- uint32_t checkpointDataCount;
- d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
- checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointDataNV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
- return checkpointData;
- }
-
- template <typename CheckpointDataNVAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
- {
- std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
- uint32_t checkpointDataCount;
- d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
- checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointDataNV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
- return checkpointData;
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Queue::bindSparse( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkQueueBindSparse( m_queue,
- bindInfoCount,
- reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ),
- static_cast<VkFence>( fence ) ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
- {
- Result result =
- static_cast<Result>( d.vkQueueBindSparse( m_queue,
- bindInfo.size(),
- reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkQueueEndDebugUtilsLabelEXT( m_queue );
- }
-
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
- }
-
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
- const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>(
- d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
- }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo,
- Dispatch const & d ) const
- {
- Result result =
- static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
- return createResultValue(
- result,
- VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
- m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- }
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
+ uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
- m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+ return static_cast<Bool32>(
+ d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
}
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
+ uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkQueueSubmit(
- m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
- {
- Result result = static_cast<Result>( d.vkQueueSubmit( m_queue,
- submits.size(),
- reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_color_write_enable ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Queue::submit2KHR( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkQueueSubmit2KHR(
- m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ d.vkCmdSetColorWriteEnableEXT(
+ m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
- {
- Result result =
- static_cast<Result>( d.vkQueueSubmit2KHR( m_queue,
- submits.size(),
- reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
- }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
- {
- return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
- }
-#else
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::waitIdle( Dispatch const & d ) const
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+ d.vkCmdSetColorWriteEnableEXT(
+ m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
template <>
struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>
@@ -120981,6 +121636,14 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
+ struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>
{
enum
@@ -121761,6 +122424,22 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
+ struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, PhysicalDeviceFeatures2>
{
enum
@@ -122443,6 +123122,14 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
+ struct StructExtends<QueueFamilyGlobalPriorityPropertiesEXT, QueueFamilyProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>
{
enum
@@ -123093,7 +123780,7 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( !vulkanLibraryName.empty() )
{
-# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
# elif defined( _WIN32 )
m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
@@ -123103,7 +123790,7 @@ namespace VULKAN_HPP_NAMESPACE
}
else
{
-# if defined( __linux__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
if ( m_library == nullptr )
{
@@ -123147,7 +123834,7 @@ namespace VULKAN_HPP_NAMESPACE
{
if ( m_library )
{
-# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
dlclose( m_library );
# elif defined( _WIN32 )
::FreeLibrary( m_library );
@@ -123160,7 +123847,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
{
-# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
return (T)dlsym( m_library, function );
# elif defined( _WIN32 )
return ( T )::GetProcAddress( m_library, function );
@@ -123175,7 +123862,7 @@ namespace VULKAN_HPP_NAMESPACE
}
private:
-# if defined( __linux__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
void * m_library;
# elif defined( _WIN32 )
::HINSTANCE m_library;
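
Note on the vulkan.hpp additions above: besides switching the dynamic-loader guards from __linux__ to __unix__, this release adds wrappers for VK_FUCHSIA_external_semaphore, VK_EXT_extended_dynamic_state2, VK_QNX_screen_surface and VK_EXT_color_write_enable. A minimal, hypothetical usage sketch of the new color-write-enable command follows; it is not part of the diff, and it assumes a vk::CommandBuffer in the recording state, a pipeline created with VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, and a dispatcher that can resolve the extension entry point (function and variable names are illustrative):

    // Sketch only, not part of this commit. Assumes vulkan.hpp from this release,
    // a recording command buffer, VK_EXT_color_write_enable enabled on the device,
    // and a dispatcher that resolves vkCmdSetColorWriteEnableEXT
    // (e.g. a vk::DispatchLoaderDynamic).
    #include <array>
    #include <vulkan/vulkan.hpp>

    void maskSecondColorAttachment( vk::CommandBuffer cmd )
    {
      // One VkBool32 per color attachment of the currently bound pipeline.
      std::array<vk::Bool32, 2> enables = { VK_TRUE, VK_FALSE };
      cmd.setColorWriteEnableEXT( enables );  // ArrayProxy overload shown above
    }
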
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index b405699..2e9ef74 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -72,7 +72,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 179
+#define VK_HEADER_VERSION 180
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 2, VK_HEADER_VERSION)
@@ -787,6 +787,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = 1000314007,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 1000325000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
@@ -824,6 +825,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000,
VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = 1000388000,
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = 1000388001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
@@ -8092,6 +8095,17 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
#endif
+#define VK_KHR_shader_subgroup_uniform_control_flow 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow"
+typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderSubgroupUniformControlFlow;
+} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+
+
#define VK_KHR_zero_initialize_workgroup_memory 1
#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1
#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory"
@@ -11166,7 +11180,7 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT;
#define VK_EXT_validation_features 1
-#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 4
+#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5
#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features"
typedef enum VkValidationFeatureEnableEXT {
@@ -11186,6 +11200,7 @@ typedef enum VkValidationFeatureDisableEXT {
VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4,
VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5,
VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6,
+ VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7,
VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkValidationFeatureDisableEXT;
typedef struct VkValidationFeaturesEXT {
@@ -12281,6 +12296,25 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT(
#endif
+#define VK_EXT_global_priority_query 1
+#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT 16U
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query"
+typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 globalPriorityQuery;
+} VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+
+typedef struct VkQueueFamilyGlobalPriorityPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t priorityCount;
+ VkQueueGlobalPriorityEXT priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_EXT];
+} VkQueueFamilyGlobalPriorityPropertiesEXT;
+
+
+
#define VK_KHR_acceleration_structure 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR)
#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 11
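
Note on the vulkan_core.h additions above: the new VK_EXT_global_priority_query structures are pNext-chained queries. VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT extends VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, and VkQueueFamilyGlobalPriorityPropertiesEXT extends VkQueueFamilyProperties2, as the StructExtends specializations earlier in this diff show. A hypothetical query sketch, assuming a valid VkPhysicalDevice from an instance that is at least Vulkan 1.1 or exposes VK_KHR_get_physical_device_properties2 (variable names are illustrative):

    // Sketch only, not part of this commit.
    #include <vector>
    #include <vulkan/vulkan.h>

    void queryGlobalPrioritySupport( VkPhysicalDevice physicalDevice )
    {
      // Feature query: chain the new struct into VkPhysicalDeviceFeatures2.
      VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT globalPriorityFeatures{};
      globalPriorityFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT;

      VkPhysicalDeviceFeatures2 features2{};
      features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
      features2.pNext = &globalPriorityFeatures;
      vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );

      if ( globalPriorityFeatures.globalPriorityQuery == VK_TRUE )
      {
        // Property query: chain the new struct into each VkQueueFamilyProperties2.
        uint32_t count = 0;
        vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, &count, nullptr );

        std::vector<VkQueueFamilyGlobalPriorityPropertiesEXT> priorities( count );
        std::vector<VkQueueFamilyProperties2>                 properties( count );
        for ( uint32_t i = 0; i < count; ++i )
        {
          priorities[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT;
          properties[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
          properties[i].pNext = &priorities[i];
        }
        vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, &count, properties.data() );
        // priorities[i].priorityCount and priorities[i].priorities[] now describe
        // the global priorities supported by queue family i.
      }
    }
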
diff --git a/registry/cgenerator.py b/registry/cgenerator.py
index f77e21a..5614602 100644
--- a/registry/cgenerator.py
+++ b/registry/cgenerator.py
@@ -391,45 +391,12 @@ class COutputGenerator(OutputGenerator):
self.appendSection(section, "\n" + body)
def genEnum(self, enuminfo, name, alias):
- """Generate enumerants.
+ """Generate the C declaration for a constant (a single <enum> value)."""
- <enum> tags may specify their values in several ways, but are usually
- just integers."""
OutputGenerator.genEnum(self, enuminfo, name, alias)
- (_, strVal) = self.enumToValue(enuminfo.elem, False)
-
- if self.misracppstyle() and enuminfo.elem.get('type') and not alias:
- # Generate e.g.: static constexpr uint32_t x = ~static_cast<uint32_t>(1U);
- # This appeases MISRA "underlying type" rules.
- typeStr = enuminfo.elem.get('type');
- invert = '~' in strVal
- number = strVal.strip("()~UL")
- if typeStr != "float":
- number += 'U'
- strVal = "~" if invert else ""
- strVal += "static_cast<" + typeStr + ">(" + number + ")"
- body = 'static constexpr ' + typeStr.ljust(9) + name.ljust(33) + ' {' + strVal + '};'
- self.appendSection('enum', body)
- elif enuminfo.elem.get('type') and not alias:
- # Generate e.g.: #define x (~0ULL)
- typeStr = enuminfo.elem.get('type');
- invert = '~' in strVal
- paren = '(' in strVal
- number = strVal.strip("()~UL")
- if typeStr != "float":
- if typeStr == "uint64_t":
- number += 'ULL'
- else:
- number += 'U'
- strVal = "~" if invert else ""
- strVal += number
- if paren:
- strVal = "(" + strVal + ")";
- body = '#define ' + name.ljust(33) + ' ' + strVal;
- self.appendSection('enum', body)
- else:
- body = '#define ' + name.ljust(33) + ' ' + strVal
- self.appendSection('enum', body)
+
+ body = self.buildConstantCDecl(enuminfo, name, alias)
+ self.appendSection('enum', body)
def genCmd(self, cmdinfo, name, alias):
"Command generation"
diff --git a/registry/generator.py b/registry/generator.py
index 6203c65..19bbc3c 100644
--- a/registry/generator.py
+++ b/registry/generator.py
@@ -691,6 +691,47 @@ class OutputGenerator:
return (section, '\n'.join(body))
+ def buildConstantCDecl(self, enuminfo, name, alias):
+ """Generate the C declaration for a constant (a single <enum>
+ value).
+
+ <enum> tags may specify their values in several ways, but are
+ usually just integers or floating-point numbers."""
+
+ (_, strVal) = self.enumToValue(enuminfo.elem, False)
+
+ if self.misracppstyle() and enuminfo.elem.get('type') and not alias:
+ # Generate e.g.: static constexpr uint32_t x = ~static_cast<uint32_t>(1U);
+ # This appeases MISRA "underlying type" rules.
+ typeStr = enuminfo.elem.get('type');
+ invert = '~' in strVal
+ number = strVal.strip("()~UL")
+ if typeStr != "float":
+ number += 'U'
+ strVal = "~" if invert else ""
+ strVal += "static_cast<" + typeStr + ">(" + number + ")"
+ body = 'static constexpr ' + typeStr.ljust(9) + name.ljust(33) + ' {' + strVal + '};'
+ elif enuminfo.elem.get('type') and not alias:
+ # Generate e.g.: #define x (~0ULL)
+ typeStr = enuminfo.elem.get('type');
+ invert = '~' in strVal
+ paren = '(' in strVal
+ number = strVal.strip("()~UL")
+ if typeStr != "float":
+ if typeStr == "uint64_t":
+ number += 'ULL'
+ else:
+ number += 'U'
+ strVal = "~" if invert else ""
+ strVal += number
+ if paren:
+ strVal = "(" + strVal + ")";
+ body = '#define ' + name.ljust(33) + ' ' + strVal;
+ else:
+ body = '#define ' + name.ljust(33) + ' ' + strVal
+
+ return body
+
def makeDir(self, path):
"""Create a directory, if not already done.
diff --git a/registry/validusage.json b/registry/validusage.json
index 87aa963..f9864c0 100644
--- a/registry/validusage.json
+++ b/registry/validusage.json
@@ -1,9 +1,9 @@
{
"version info": {
"schema version": 2,
- "api version": "1.2.179",
- "comment": "from git branch: github-main commit: 112e8218bb35cc70ba2cf67bdc61e3412c864eec",
- "date": "2021-05-24 07:24:39Z"
+ "api version": "1.2.180",
+ "comment": "from git branch: github-main commit: b4e8cd820b2487bc892b391fb26b49501473a6a6",
+ "date": "2021-06-06 12:24:39Z"
},
"validation": {
"vkGetInstanceProcAddr": {
@@ -314,7 +314,7 @@
},
{
"vuid": "VUID-VkQueueFamilyProperties2-pNext-pNext",
- "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueueFamilyCheckpointProperties2NV\">VkQueueFamilyCheckpointProperties2NV</a>, <a href=\"#VkQueueFamilyCheckpointPropertiesNV\">VkQueueFamilyCheckpointPropertiesNV</a>, or <a href=\"#VkVideoQueueFamilyProperties2KHR\">VkVideoQueueFamilyProperties2KHR</a>"
+ "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueueFamilyCheckpointProperties2NV\">VkQueueFamilyCheckpointProperties2NV</a>, <a href=\"#VkQueueFamilyCheckpointPropertiesNV\">VkQueueFamilyCheckpointPropertiesNV</a>, <a href=\"#VkQueueFamilyGlobalPriorityPropertiesEXT\">VkQueueFamilyGlobalPriorityPropertiesEXT</a>, or <a href=\"#VkVideoQueueFamilyProperties2KHR\">VkVideoQueueFamilyProperties2KHR</a>"
},
{
"vuid": "VUID-VkQueueFamilyProperties2-sType-unique",
@@ -322,6 +322,18 @@
}
]
},
+ "VkQueueFamilyGlobalPriorityPropertiesEXT": {
+ "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_global_priority_query)": [
+ {
+ "vuid": "VUID-VkQueueFamilyGlobalPriorityPropertiesEXT-sType-sType",
+ "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT</code>"
+ },
+ {
+ "vuid": "VUID-VkQueueFamilyGlobalPriorityPropertiesEXT-priorities-parameter",
+ "text": " Any given element of <code>priorities</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueueGlobalPriorityEXT\">VkQueueGlobalPriorityEXT</a> value"
+ }
+ ]
+ },
"VkQueueFamilyCheckpointProperties2NV": {
"(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [
{
@@ -580,7 +592,7 @@
},
{
"vuid": "VUID-VkDeviceCreateInfo-pNext-pNext",
- "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceDeviceMemoryReportCreateInfoEXT\">VkDeviceDeviceMemoryReportCreateInfoEXT</a>, <a href=\"#VkDeviceDiagnosticsConfigCreateInfoNV\">VkDeviceDiagnosticsConfigCreateInfoNV</a>, <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>, <a href=\"#VkDeviceMemoryOverallocationCreateInfoAMD\">VkDeviceMemoryOverallocationCreateInfoAMD</a>, <a href=\"#VkDevicePrivateDataCreateInfoEXT\">VkDevicePrivateDataCreateInfoEXT</a>, <a href=\"#VkPhysicalDevice16BitStorageFeatures\">VkPhysicalDevice16BitStorageFeatures</a>, <a href=\"#VkPhysicalDevice4444FormatsFeaturesEXT\">VkPhysicalDevice4444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDevice8BitStorageFeatures\">VkPhysicalDevice8BitStorageFeatures</a>, <a href=\"#VkPhysicalDeviceASTCDecodeFeaturesEXT\">VkPhysicalDeviceASTCDecodeFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceAccelerationStructureFeaturesKHR\">VkPhysicalDeviceAccelerationStructureFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT\">VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeatures\">VkPhysicalDeviceBufferDeviceAddressFeatures</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesEXT\">VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCoherentMemoryFeaturesAMD\">VkPhysicalDeviceCoherentMemoryFeaturesAMD</a>, <a href=\"#VkPhysicalDeviceColorWriteEnableFeaturesEXT\">VkPhysicalDeviceColorWriteEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceComputeShaderDerivativesFeaturesNV\">VkPhysicalDeviceComputeShaderDerivativesFeaturesNV</a>, <a href=\"#VkPhysicalDeviceConditionalRenderingFeaturesEXT\">VkPhysicalDeviceConditionalRenderingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixFeaturesNV\">VkPhysicalDeviceCooperativeMatrixFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCornerSampledImageFeaturesNV\">VkPhysicalDeviceCornerSampledImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCoverageReductionModeFeaturesNV\">VkPhysicalDeviceCoverageReductionModeFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCustomBorderColorFeaturesEXT\">VkPhysicalDeviceCustomBorderColorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV\">VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDepthClipEnableFeaturesEXT\">VkPhysicalDeviceDepthClipEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingFeatures\">VkPhysicalDeviceDescriptorIndexingFeatures</a>, <a href=\"#VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV\">VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDeviceMemoryReportFeaturesEXT\">VkPhysicalDeviceDeviceMemoryReportFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDiagnosticsConfigFeaturesNV\">VkPhysicalDeviceDiagnosticsConfigFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExclusiveScissorFeaturesNV\">VkPhysicalDeviceExclusiveScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicState2FeaturesEXT\">VkPhysicalDeviceExtendedDynamicState2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicStateFeaturesEXT\">VkPhysicalDeviceExtendedDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a>, <a 
href=\"#VkPhysicalDeviceFragmentDensityMap2FeaturesEXT\">VkPhysicalDeviceFragmentDensityMap2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapFeaturesEXT\">VkPhysicalDeviceFragmentDensityMapFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV\">VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT\">VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV\">VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateFeaturesKHR\">VkPhysicalDeviceFragmentShadingRateFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceHostQueryResetFeatures\">VkPhysicalDeviceHostQueryResetFeatures</a>, <a href=\"#VkPhysicalDeviceImageRobustnessFeaturesEXT\">VkPhysicalDeviceImageRobustnessFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceImagelessFramebufferFeatures\">VkPhysicalDeviceImagelessFramebufferFeatures</a>, <a href=\"#VkPhysicalDeviceIndexTypeUint8FeaturesEXT\">VkPhysicalDeviceIndexTypeUint8FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceInheritedViewportScissorFeaturesNV\">VkPhysicalDeviceInheritedViewportScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationFeaturesEXT\">VkPhysicalDeviceLineRasterizationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMemoryPriorityFeaturesEXT\">VkPhysicalDeviceMemoryPriorityFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMeshShaderFeaturesNV\">VkPhysicalDeviceMeshShaderFeaturesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewFeatures\">VkPhysicalDeviceMultiviewFeatures</a>, <a href=\"#VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE\">VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE</a>, <a href=\"#VkPhysicalDevicePerformanceQueryFeaturesKHR\">VkPhysicalDevicePerformanceQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT\">VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT</a>, <a href=\"#VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR\">VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePortabilitySubsetFeaturesKHR\">VkPhysicalDevicePortabilitySubsetFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePrivateDataFeaturesEXT\">VkPhysicalDevicePrivateDataFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryFeatures\">VkPhysicalDeviceProtectedMemoryFeatures</a>, <a href=\"#VkPhysicalDeviceProvokingVertexFeaturesEXT\">VkPhysicalDeviceProvokingVertexFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceRayQueryFeaturesKHR\">VkPhysicalDeviceRayQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRayTracingPipelineFeaturesKHR\">VkPhysicalDeviceRayTracingPipelineFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV\">VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV</a>, <a href=\"#VkPhysicalDeviceRobustness2FeaturesEXT\">VkPhysicalDeviceRobustness2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSamplerYcbcrConversionFeatures\">VkPhysicalDeviceSamplerYcbcrConversionFeatures</a>, <a href=\"#VkPhysicalDeviceScalarBlockLayoutFeatures\">VkPhysicalDeviceScalarBlockLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures\">VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures</a>, <a href=\"#VkPhysicalDeviceShaderAtomicFloatFeaturesEXT\">VkPhysicalDeviceShaderAtomicFloatFeaturesEXT</a>, <a 
href=\"#VkPhysicalDeviceShaderAtomicInt64Features\">VkPhysicalDeviceShaderAtomicInt64Features</a>, <a href=\"#VkPhysicalDeviceShaderClockFeaturesKHR\">VkPhysicalDeviceShaderClockFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT\">VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderDrawParametersFeatures\">VkPhysicalDeviceShaderDrawParametersFeatures</a>, <a href=\"#VkPhysicalDeviceShaderFloat16Int8Features\">VkPhysicalDeviceShaderFloat16Int8Features</a>, <a href=\"#VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT\">VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderImageFootprintFeaturesNV\">VkPhysicalDeviceShaderImageFootprintFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL\">VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsFeaturesNV\">VkPhysicalDeviceShaderSMBuiltinsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures\">VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures</a>, <a href=\"#VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR\">VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShadingRateImageFeaturesNV\">VkPhysicalDeviceShadingRateImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlFeaturesEXT\">VkPhysicalDeviceSubgroupSizeControlFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSynchronization2FeaturesKHR\">VkPhysicalDeviceSynchronization2FeaturesKHR</a>, <a href=\"#VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT\">VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT\">VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphoreFeatures\">VkPhysicalDeviceTimelineSemaphoreFeatures</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackFeaturesEXT\">VkPhysicalDeviceTransformFeedbackFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceUniformBufferStandardLayoutFeatures\">VkPhysicalDeviceUniformBufferStandardLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceVariablePointersFeatures\">VkPhysicalDeviceVariablePointersFeatures</a>, <a href=\"#VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT\">VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT\">VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVulkan11Features\">VkPhysicalDeviceVulkan11Features</a>, <a href=\"#VkPhysicalDeviceVulkan12Features\">VkPhysicalDeviceVulkan12Features</a>, <a href=\"#VkPhysicalDeviceVulkanMemoryModelFeatures\">VkPhysicalDeviceVulkanMemoryModelFeatures</a>, <a href=\"#VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR\">VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT\">VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceYcbcrImageArraysFeaturesEXT\">VkPhysicalDeviceYcbcrImageArraysFeaturesEXT</a>, or <a href=\"#VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR\">VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR</a>"
+ "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceDeviceMemoryReportCreateInfoEXT\">VkDeviceDeviceMemoryReportCreateInfoEXT</a>, <a href=\"#VkDeviceDiagnosticsConfigCreateInfoNV\">VkDeviceDiagnosticsConfigCreateInfoNV</a>, <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>, <a href=\"#VkDeviceMemoryOverallocationCreateInfoAMD\">VkDeviceMemoryOverallocationCreateInfoAMD</a>, <a href=\"#VkDevicePrivateDataCreateInfoEXT\">VkDevicePrivateDataCreateInfoEXT</a>, <a href=\"#VkPhysicalDevice16BitStorageFeatures\">VkPhysicalDevice16BitStorageFeatures</a>, <a href=\"#VkPhysicalDevice4444FormatsFeaturesEXT\">VkPhysicalDevice4444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDevice8BitStorageFeatures\">VkPhysicalDevice8BitStorageFeatures</a>, <a href=\"#VkPhysicalDeviceASTCDecodeFeaturesEXT\">VkPhysicalDeviceASTCDecodeFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceAccelerationStructureFeaturesKHR\">VkPhysicalDeviceAccelerationStructureFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT\">VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeatures\">VkPhysicalDeviceBufferDeviceAddressFeatures</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesEXT\">VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCoherentMemoryFeaturesAMD\">VkPhysicalDeviceCoherentMemoryFeaturesAMD</a>, <a href=\"#VkPhysicalDeviceColorWriteEnableFeaturesEXT\">VkPhysicalDeviceColorWriteEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceComputeShaderDerivativesFeaturesNV\">VkPhysicalDeviceComputeShaderDerivativesFeaturesNV</a>, <a href=\"#VkPhysicalDeviceConditionalRenderingFeaturesEXT\">VkPhysicalDeviceConditionalRenderingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixFeaturesNV\">VkPhysicalDeviceCooperativeMatrixFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCornerSampledImageFeaturesNV\">VkPhysicalDeviceCornerSampledImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCoverageReductionModeFeaturesNV\">VkPhysicalDeviceCoverageReductionModeFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCustomBorderColorFeaturesEXT\">VkPhysicalDeviceCustomBorderColorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV\">VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDepthClipEnableFeaturesEXT\">VkPhysicalDeviceDepthClipEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingFeatures\">VkPhysicalDeviceDescriptorIndexingFeatures</a>, <a href=\"#VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV\">VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDeviceMemoryReportFeaturesEXT\">VkPhysicalDeviceDeviceMemoryReportFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDiagnosticsConfigFeaturesNV\">VkPhysicalDeviceDiagnosticsConfigFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExclusiveScissorFeaturesNV\">VkPhysicalDeviceExclusiveScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicState2FeaturesEXT\">VkPhysicalDeviceExtendedDynamicState2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceExtendedDynamicStateFeaturesEXT\">VkPhysicalDeviceExtendedDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a>, <a 
href=\"#VkPhysicalDeviceFragmentDensityMap2FeaturesEXT\">VkPhysicalDeviceFragmentDensityMap2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapFeaturesEXT\">VkPhysicalDeviceFragmentDensityMapFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV\">VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT\">VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV\">VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShadingRateFeaturesKHR\">VkPhysicalDeviceFragmentShadingRateFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceHostQueryResetFeatures\">VkPhysicalDeviceHostQueryResetFeatures</a>, <a href=\"#VkPhysicalDeviceImageRobustnessFeaturesEXT\">VkPhysicalDeviceImageRobustnessFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceImagelessFramebufferFeatures\">VkPhysicalDeviceImagelessFramebufferFeatures</a>, <a href=\"#VkPhysicalDeviceIndexTypeUint8FeaturesEXT\">VkPhysicalDeviceIndexTypeUint8FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceInheritedViewportScissorFeaturesNV\">VkPhysicalDeviceInheritedViewportScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationFeaturesEXT\">VkPhysicalDeviceLineRasterizationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMemoryPriorityFeaturesEXT\">VkPhysicalDeviceMemoryPriorityFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMeshShaderFeaturesNV\">VkPhysicalDeviceMeshShaderFeaturesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewFeatures\">VkPhysicalDeviceMultiviewFeatures</a>, <a href=\"#VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE\">VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE</a>, <a href=\"#VkPhysicalDevicePerformanceQueryFeaturesKHR\">VkPhysicalDevicePerformanceQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT\">VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT</a>, <a href=\"#VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR\">VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePortabilitySubsetFeaturesKHR\">VkPhysicalDevicePortabilitySubsetFeaturesKHR</a>, <a href=\"#VkPhysicalDevicePrivateDataFeaturesEXT\">VkPhysicalDevicePrivateDataFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryFeatures\">VkPhysicalDeviceProtectedMemoryFeatures</a>, <a href=\"#VkPhysicalDeviceProvokingVertexFeaturesEXT\">VkPhysicalDeviceProvokingVertexFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceRayQueryFeaturesKHR\">VkPhysicalDeviceRayQueryFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRayTracingPipelineFeaturesKHR\">VkPhysicalDeviceRayTracingPipelineFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV\">VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV</a>, <a href=\"#VkPhysicalDeviceRobustness2FeaturesEXT\">VkPhysicalDeviceRobustness2FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSamplerYcbcrConversionFeatures\">VkPhysicalDeviceSamplerYcbcrConversionFeatures</a>, <a href=\"#VkPhysicalDeviceScalarBlockLayoutFeatures\">VkPhysicalDeviceScalarBlockLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures\">VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures</a>, <a href=\"#VkPhysicalDeviceShaderAtomicFloatFeaturesEXT\">VkPhysicalDeviceShaderAtomicFloatFeaturesEXT</a>, <a 
href=\"#VkPhysicalDeviceShaderAtomicInt64Features\">VkPhysicalDeviceShaderAtomicInt64Features</a>, <a href=\"#VkPhysicalDeviceShaderClockFeaturesKHR\">VkPhysicalDeviceShaderClockFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT\">VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderDrawParametersFeatures\">VkPhysicalDeviceShaderDrawParametersFeatures</a>, <a href=\"#VkPhysicalDeviceShaderFloat16Int8Features\">VkPhysicalDeviceShaderFloat16Int8Features</a>, <a href=\"#VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT\">VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderImageFootprintFeaturesNV\">VkPhysicalDeviceShaderImageFootprintFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL\">VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsFeaturesNV\">VkPhysicalDeviceShaderSMBuiltinsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures\">VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR\">VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR\">VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShadingRateImageFeaturesNV\">VkPhysicalDeviceShadingRateImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlFeaturesEXT\">VkPhysicalDeviceSubgroupSizeControlFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSynchronization2FeaturesKHR\">VkPhysicalDeviceSynchronization2FeaturesKHR</a>, <a href=\"#VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT\">VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT\">VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphoreFeatures\">VkPhysicalDeviceTimelineSemaphoreFeatures</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackFeaturesEXT\">VkPhysicalDeviceTransformFeedbackFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceUniformBufferStandardLayoutFeatures\">VkPhysicalDeviceUniformBufferStandardLayoutFeatures</a>, <a href=\"#VkPhysicalDeviceVariablePointersFeatures\">VkPhysicalDeviceVariablePointersFeatures</a>, <a href=\"#VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT\">VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT\">VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVulkan11Features\">VkPhysicalDeviceVulkan11Features</a>, <a href=\"#VkPhysicalDeviceVulkan12Features\">VkPhysicalDeviceVulkan12Features</a>, <a href=\"#VkPhysicalDeviceVulkanMemoryModelFeatures\">VkPhysicalDeviceVulkanMemoryModelFeatures</a>, <a href=\"#VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR\">VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT\">VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceYcbcrImageArraysFeaturesEXT\">VkPhysicalDeviceYcbcrImageArraysFeaturesEXT</a>, or <a href=\"#VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR\">VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR</a>"
},
{
"vuid": "VUID-VkDeviceCreateInfo-sType-unique",
@@ -9067,10 +9079,6 @@
"text": " The number of resources in <code>layout</code> accessible to each shader stage that is used by the pipeline <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageResources</code>"
},
{
- "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
- "text": " If the pipeline is being created with <a href=\"#pipeline-graphics-subsets-vertex-input\">vertex input state</a>, <code>pVertexInputState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineVertexInputStateCreateInfo\">VkPipelineVertexInputStateCreateInfo</a> structure"
- },
- {
"vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
"text": " If the pipeline is being created with <a href=\"#pipeline-graphics-subsets-vertex-input\">vertex input state</a>, <code>pInputAssemblyState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a> structure"
},
@@ -9303,6 +9311,16 @@
"text": " If the pipeline is being created with <a href=\"#pipeline-graphics-subsets-pre-rasterization\">pre-rasterization shader state</a>, and no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT</code>, and if <code>pNext</code> chain includes a <a href=\"#VkPipelineDiscardRectangleStateCreateInfoEXT\">VkPipelineDiscardRectangleStateCreateInfoEXT</a> structure, and if its <code>discardRectangleCount</code> member is not <code>0</code>, then its <code>pDiscardRectangles</code> member <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>discardRectangleCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
}
],
+ "!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+ "text": " If the pipeline is being created with <a href=\"#pipeline-graphics-subsets-vertex-input\">vertex input state</a>, <code>pVertexInputState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineVertexInputStateCreateInfo\">VkPipelineVertexInputStateCreateInfo</a> structure"
+ },
+ {
+ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pVertexInputState-04910",
+ "text": " If the pipeline is being created with <a href=\"#pipeline-graphics-subsets-vertex-input\">vertex input state</a>, and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> is not set, <code>pVertexInputState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineVertexInputStateCreateInfo\">VkPipelineVertexInputStateCreateInfo</a> structure"
+ }
+ ],
"(VK_EXT_transform_feedback)": [
{
"vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02317",
@@ -10549,6 +10567,10 @@
"text": " The <code>size</code> member of each element of <code>pMapEntries</code> <strong class=\"purple\">must</strong> be less than or equal to <code>dataSize</code> minus <code>offset</code>"
},
{
+ "vuid": "VUID-VkSpecializationInfo-constantID-04911",
+ "text": " The <code>constantID</code> value of each element of <code>pMapEntries</code> <strong class=\"purple\">must</strong> be unique within <code>pMapEntries</code>"
+ },
+ {
"vuid": "VUID-VkSpecializationInfo-pMapEntries-parameter",
"text": " If <code>mapEntryCount</code> is not <code>0</code>, <code>pMapEntries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>mapEntryCount</code> valid <a href=\"#VkSpecializationMapEntry\">VkSpecializationMapEntry</a> structures"
},
@@ -12466,13 +12488,13 @@
"(VK_KHR_video_decode_queue)": [
{
"vuid": "VUID-VkBufferCreateInfo-usage-04813",
- "text": " If <code>usage</code> includes <code>VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR</code>, <code>VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR</code>, then the pNext chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a decode codec-operation."
+ "text": " If <code>usage</code> includes <code>VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR</code>, <code>VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a decode codec-operation."
}
],
"(VK_KHR_video_encode_queue)": [
{
"vuid": "VUID-VkBufferCreateInfo-usage-04814",
- "text": " If <code>usage</code> includes <code>VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR</code>, <code>VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR</code>, then the pNext chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a encode codec-operation."
+ "text": " If <code>usage</code> includes <code>VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR</code>, <code>VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a encode codec-operation."
}
]
},
@@ -13202,13 +13224,13 @@
"(VK_KHR_video_decode_queue)": [
{
"vuid": "VUID-VkImageCreateInfo-usage-04815",
- "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR</code>, then the pNext chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a decode codec-operation."
+ "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a decode codec-operation."
}
],
"(VK_KHR_video_encode_queue)": [
{
"vuid": "VUID-VkImageCreateInfo-usage-04816",
- "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR</code>, then the pNext chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a encode codec-operation."
+ "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR</code>, <code>VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a valid <a href=\"#VkVideoProfilesKHR\">VkVideoProfilesKHR</a> structure which includes at least one <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> with a encode codec-operation."
}
]
},
@@ -13756,7 +13778,7 @@
},
{
"vuid": "VUID-VkImageViewCreateInfo-format-04724",
- "text": " If <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y′C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, then the pNext chain <strong class=\"purple\">must</strong> include a <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a> structure with a conversion value other than VK_NULL_HANDLE"
+ "text": " If <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y′C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a> structure with a conversion value other than VK_NULL_HANDLE"
},
{
"vuid": "VUID-VkImageViewCreateInfo-format-04714",
@@ -24841,10 +24863,6 @@
{
"vuid": "VUID-vkCmdDraw-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDraw-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -24906,6 +24924,28 @@
"vuid": "VUID-vkCmdDraw-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDraw-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDraw-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDraw-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDraw-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
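The draw-time groups added above for vkCmdDraw (04912, 04913, 04914), and repeated below for the other draw and mesh-task commands, reduce to one rule: when VK_DYNAMIC_STATE_VERTEX_INPUT_EXT is enabled, vkCmdSetVertexInputEXT must be recorded before the draw, and when VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT is enabled alongside it, the strides come from that call rather than from vkCmdBindVertexBuffers2EXT::pStrides. A hedged sketch, assuming `cmd`, `pipeline`, `vertexBuffer` and `vertexCount` exist, the command buffer is inside a render pass, and the pipeline enables both dynamic states:

    /* Interleaved vec3 position + vec2 uv in one binding. */
    VkVertexInputBindingDescription2EXT binding = {0};
    binding.sType     = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT;
    binding.binding   = 0;
    binding.stride    = 5 * sizeof(float);   /* with both states enabled, the stride is taken from here (04912) */
    binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
    binding.divisor   = 1;

    VkVertexInputAttributeDescription2EXT attributes[2] = {0};
    attributes[0].sType    = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
    attributes[0].location = 0;
    attributes[0].binding  = 0;
    attributes[0].format   = VK_FORMAT_R32G32B32_SFLOAT;   /* position */
    attributes[0].offset   = 0;
    attributes[1].sType    = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
    attributes[1].location = 1;
    attributes[1].binding  = 0;
    attributes[1].format   = VK_FORMAT_R32G32_SFLOAT;       /* uv */
    attributes[1].offset   = 3 * sizeof(float);

    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);

    VkDeviceSize offset = 0;
    vkCmdBindVertexBuffers(cmd, 0, 1, &vertexBuffer, &offset);

    vkCmdSetVertexInputEXT(cmd, 1, &binding, 2, attributes);   /* must precede the draw (04912/04914) */
    vkCmdDraw(cmd, vertexCount, 1, 0, 0);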
"vkCmdDrawIndexed": {
@@ -25131,10 +25171,6 @@
{
"vuid": "VUID-vkCmdDrawIndexed-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndexed-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -25196,6 +25232,28 @@
"vuid": "VUID-vkCmdDrawIndexed-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexed-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndexed-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexed-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexed-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"vkCmdDrawIndirect": {
@@ -25453,10 +25511,6 @@
{
"vuid": "VUID-vkCmdDrawIndirect-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndirect-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -25518,6 +25572,28 @@
"vuid": "VUID-vkCmdDrawIndirect-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirect-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndirect-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirect-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirect-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"VkDrawIndirectCommand": {
@@ -25803,10 +25879,6 @@
{
"vuid": "VUID-vkCmdDrawIndirectCount-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndirectCount-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -25869,6 +25941,28 @@
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectCount-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndirectCount-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectCount-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectCount-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
+ ],
"(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_2)": [
{
"vuid": "VUID-vkCmdDrawIndirectCount-None-04445",
@@ -26131,10 +26225,6 @@
{
"vuid": "VUID-vkCmdDrawIndexedIndirect-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndexedIndirect-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -26196,6 +26286,28 @@
"vuid": "VUID-vkCmdDrawIndexedIndirect-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirect-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirect-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"VkDrawIndexedIndirectCommand": {
@@ -26485,10 +26597,6 @@
{
"vuid": "VUID-vkCmdDrawIndexedIndirectCount-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -26551,6 +26659,28 @@
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
+ ],
"(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_2)": [
{
"vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04445",
@@ -26801,10 +26931,6 @@
{
"vuid": "VUID-vkCmdDrawIndirectByteCountEXT-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -26866,6 +26992,28 @@
"vuid": "VUID-vkCmdDrawIndirectByteCountEXT-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_EXT_transform_feedback)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"vkCmdBeginConditionalRenderingEXT": {
@@ -27155,10 +27303,6 @@
{
"vuid": "VUID-vkCmdDrawMeshTasksNV-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawMeshTasksNV-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -27220,6 +27364,28 @@
"vuid": "VUID-vkCmdDrawMeshTasksNV-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksNV-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksNV-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"vkCmdDrawMeshTasksIndirectNV": {
@@ -27461,10 +27627,6 @@
{
"vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -27526,6 +27688,28 @@
"vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-primitiveFragmentShadingRateWithMultipleViewports-04552",
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
]
},
"VkDrawMeshTasksIndirectCommandNV": {
@@ -27795,10 +27979,6 @@
{
"vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -27861,6 +28041,28 @@
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
+ ],
"(VK_NV_mesh_shader)+(VK_VERSION_1_2)": [
{
"vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04445",
@@ -31661,10 +31863,6 @@
{
"vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-primitiveTopology-03420",
"text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT</code> dynamic state enabled then <a href=\"#vkCmdSetPrimitiveTopologyEXT\">vkCmdSetPrimitiveTopologyEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>primitiveTopology</code> parameter of <code>vkCmdSetPrimitiveTopologyEXT</code> <strong class=\"purple\">must</strong> be of the same <a href=\"#drawing-primitive-topology-class\">topology class</a> as the pipeline <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a>::<code>topology</code> state"
- },
- {
- "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pStrides-04884",
- "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
}
],
"(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
@@ -31727,6 +31925,28 @@
"text": " If the <a href=\"#limits-primitiveFragmentShadingRateWithMultipleViewports\"><code>primitiveFragmentShadingRateWithMultipleViewports</code></a> limit is not supported, the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT</code> dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the <code>PrimitiveShadingRateKHR</code> built-in, then <a href=\"#vkCmdSetViewportWithCountEXT\">vkCmdSetViewportWithCountEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>viewportCount</code> parameter of <code>vkCmdSetViewportWithCountEXT</code> <strong class=\"purple\">must</strong> be <code>1</code>"
}
],
+ "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04912",
+ "text": " If the bound graphics pipeline was created with both the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> and <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic states enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ },
+ {
+ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pStrides-04913",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, but not the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pStrides-04884",
+ "text": " If the bound graphics pipeline was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT</code> dynamic state enabled, then <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this drawing command, and the <code>pStrides</code> parameter of <a href=\"#vkCmdBindVertexBuffers2EXT\">vkCmdBindVertexBuffers2EXT</a> <strong class=\"purple\">must</strong> not be <code>NULL</code>"
+ }
+ ],
+ "(VK_NV_device_generated_commands)+(VK_EXT_vertex_input_dynamic_state)": [
+ {
+ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04914",
+ "text": " If the bound graphics pipeline state was created with the <code>VK_DYNAMIC_STATE_VERTEX_INPUT_EXT</code> dynamic state enabled, then <a href=\"#vkCmdSetVertexInputEXT\">vkCmdSetVertexInputEXT</a> <strong class=\"purple\">must</strong> have been called in the current command buffer prior to this draw command"
+ }
+ ],
"(VK_NV_device_generated_commands)+(VK_EXT_transform_feedback)": [
{
"vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02910",
@@ -35284,11 +35504,11 @@
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03766",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was NULL when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be <code>NULL</code>"
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03767",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was not NULL when <code>srcAccelerationStructure</code> was last built, then it may not be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was not <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> not be <code>NULL</code>"
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03768",
@@ -35568,11 +35788,11 @@
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03766",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was NULL when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be <code>NULL</code>"
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03767",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was not NULL when <code>srcAccelerationStructure</code> was last built, then it may not be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was not <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> not be <code>NULL</code>"
},
{
"vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03768",
@@ -36548,11 +36768,11 @@
},
{
"vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03766",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was NULL when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> be <code>NULL</code>"
},
{
"vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03767",
- "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> member was not NULL when <code>srcAccelerationStructure</code> was last built, then it may not be NULL"
+ "text": " For each element of <code>pInfos</code>, if its <code>mode</code> member is <code>VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR</code>, then for each <code>VkAccelerationStructureGeometryKHR</code> structure referred to by its <code>pGeometries</code> or <code>ppGeometries</code> members, if <code>geometryType</code> is <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code>, if its <code>geometry.triangles.transformData</code> address was not <code>NULL</code> when <code>srcAccelerationStructure</code> was last built, then it <strong class=\"purple\">must</strong> not be <code>NULL</code>"
},
{
"vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03768",
@@ -37318,7 +37538,7 @@
},
{
"vuid": "VUID-vkCmdTraceRaysKHR-flags-03511",
- "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR</code>, the shader group handle identified by <code>pMissShaderBindingTable</code> <strong class=\"purple\">must</strong> contain a valid miss shader"
+ "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR</code>, the shader group handle identified by <code>pMissShaderBindingTable</code> <strong class=\"purple\">must</strong> not be set to zero"
},
{
"vuid": "VUID-vkCmdTraceRaysKHR-flags-03512",
@@ -37334,11 +37554,11 @@
},
{
"vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04735",
- "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
+ "text": " Any non-zero hit shader group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
},
{
"vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04736",
- "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
+ "text": " Any non-zero hit shader group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
},
{
"vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-04625",
@@ -37626,7 +37846,7 @@
},
{
"vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03511",
- "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR</code>, the shader group handle identified by <code>pMissShaderBindingTable</code> <strong class=\"purple\">must</strong> contain a valid miss shader"
+ "text": " If the currently bound ray tracing pipeline was created with <code>flags</code> that included <code>VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR</code>, the shader group handle identified by <code>pMissShaderBindingTable</code> <strong class=\"purple\">must</strong> not be set to zero"
},
{
"vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03512",
@@ -37642,11 +37862,11 @@
},
{
"vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04735",
- "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
+ "text": " Any non-zero hit shader group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_TRIANGLES_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR</code>"
},
{
"vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04736",
- "text": " Any non-zero hit group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
+ "text": " Any non-zero hit shader group entries in <code>pHitShaderBindingTable</code> accessed by this call from a geometry with a <code>geometryType</code> of <code>VK_GEOMETRY_TYPE_AABBS_KHR</code> <strong class=\"purple\">must</strong> have been created with <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR</code>"
},
{
"vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03632",
@@ -37954,11 +38174,11 @@
"(VK_KHR_video_queue)": [
{
"vuid": "VUID-VkVideoSessionCreateInfoKHR-pVideoProfile-04845",
- "text": " <code>pVideoProfile</code> <strong class=\"purple\">must</strong> be a pointer to a valid <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> structure whose pNext chain <strong class=\"purple\">must</strong> include a valid codec-specific profile structure."
+ "text": " <code>pVideoProfile</code> <strong class=\"purple\">must</strong> be a pointer to a valid <a href=\"#VkVideoProfileKHR\">VkVideoProfileKHR</a> structure whose <code>pNext</code> chain <strong class=\"purple\">must</strong> include a valid codec-specific profile structure."
},
{
"vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesSlotsCount-04846",
- "text": " If <a href=\"#reference-picture\">Reference Pictures</a> are required for use with the created video session, the <code>maxReferencePicturesSlotsCount</code> <strong class=\"purple\">must</strong> be set to a value bigger than \"zero\"."
+ "text": " If <a href=\"#reference-picture\">Reference Pictures</a> are required for use with the created video session, the <code>maxReferencePicturesSlotsCount</code> <strong class=\"purple\">must</strong> be set to a value bigger than <code>0</code>."
},
{
"vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesSlotsCount-04847",
@@ -37966,7 +38186,7 @@
},
{
"vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesActiveCount-04848",
- "text": " If <a href=\"#reference-picture\">Reference Pictures</a> are required for use with the created video session, the <code>maxReferencePicturesActiveCount</code> <strong class=\"purple\">must</strong> be set to a value bigger than \"zero\"."
+ "text": " If <a href=\"#reference-picture\">Reference Pictures</a> are required for use with the created video session, the <code>maxReferencePicturesActiveCount</code> <strong class=\"purple\">must</strong> be set to a value bigger than <code>0</code>."
},
{
"vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesActiveCount-04849",
@@ -39746,6 +39966,14 @@
}
]
},
+ "VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT": {
+ "(VK_EXT_global_priority_query)": [
+ {
+ "vuid": "VUID-VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT-sType-sType",
+ "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT</code>"
+ }
+ ]
+ },
"VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT": {
"(VK_EXT_pipeline_creation_cache_control)": [
{
@@ -39770,6 +39998,14 @@
}
]
},
+ "VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR": {
+ "(VK_KHR_shader_subgroup_uniform_control_flow)": [
+ {
+ "vuid": "VUID-VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR-sType-sType",
+ "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR</code>"
+ }
+ ]
+ },
"VkPhysicalDeviceRobustness2FeaturesEXT": {
"(VK_EXT_robustness2)": [
{
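Editor's note: both feature structures added above extend VkPhysicalDeviceFeatures2, so with headers at or above this version they can be queried in a single call. A short sketch; the physical-device handle and function name are hypothetical, and extension / Vulkan 1.1 availability is assumed to have been checked:

#include <vulkan/vulkan.h>

/* Sketch: chain both new feature structs behind VkPhysicalDeviceFeatures2 and
 * read back globalPriorityQuery / shaderSubgroupUniformControlFlow. */
void query_new_features(VkPhysicalDevice phys)
{
    VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT globalPriority = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
    };
    VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR subgroupUcf = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
        .pNext = &globalPriority,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &subgroupUcf,
    };
    vkGetPhysicalDeviceFeatures2(phys, &features2);
    /* globalPriority.globalPriorityQuery and
     * subgroupUcf.shaderSubgroupUniformControlFlow now report support. */
}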
@@ -41663,6 +41899,46 @@
"text": " Any <code>BuiltIn</code> decoration not listed in <a href=\"#interfaces-builtin-variables\">Built-In Variables</a> <strong class=\"purple\">must</strong> not be used"
},
{
+ "vuid": "VUID-StandaloneSpirv-Location-04915",
+ "text": " The <code>Location</code> or <code>Component</code> decorations <strong class=\"purple\">must</strong> not be used with <code>BuiltIn</code>"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Location-04916",
+ "text": " The <code>Location</code> decorations <strong class=\"purple\">must</strong> be used on <a href=\"#interfaces-iointerfaces-user\">user-defined variables</a>"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Location-04917",
+ "text": " The <code>Location</code> decorations <strong class=\"purple\">must</strong> be used on an <code>OpVariable</code> with a structure type that is not a block"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Location-04918",
+ "text": " The <code>Location</code> decorations <strong class=\"purple\">must</strong> not be used on the members of <code>OpVariable</code> with a structure type that is a block decorated with <code>Location</code>"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Location-04919",
+ "text": " The <code>Location</code> decorations <strong class=\"purple\">must</strong> be used on each member of <code>OpVariable</code> with a structure type that is a block not decorated with <code>Location</code>"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Component-04920",
+ "text": " The <code>Component</code> decoration value <strong class=\"purple\">must</strong> not be greater than 3"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Component-04921",
+ "text": " If the <code>Component</code> decoration is used on an <code>OpVariable</code> that has a <code>OpTypeVector</code> type with a <code>Component</code> <code>Type</code> with a <code>Width</code> that is less than or equal to 32, the sum of its <code>Component</code> <code>Count</code> and the <code>Component</code> decoration value <strong class=\"purple\">must</strong> be less than 4"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Component-04922",
+ "text": " If the <code>Component</code> decoration is used on an <code>OpVariable</code> that has a <code>OpTypeVector</code> type with a <code>Component</code> <code>Type</code> with a <code>Width</code> that is equal to 64, the sum of two times its <code>Component</code> <code>Count</code> and the <code>Component</code> decoration value <strong class=\"purple\">must</strong> be less than 4"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Component-04923",
+ "text": " The <code>Component</code> decorations value <strong class=\"purple\">must</strong> not be 1 or 3 for scalar or two-component 64-bit data types"
+ },
+ {
+ "vuid": "VUID-StandaloneSpirv-Component-04924",
+ "text": " The <code>Component</code> decorations <strong class=\"purple\">must</strong> not used with any type that is not a scalar or vector"
+ },
+ {
"vuid": "VUID-StandaloneSpirv-GLSLShared-04669",
"text": " The <code>GLSLShared</code> and <code>GLSLPacked</code> decorations <strong class=\"purple\">must</strong> not be used"
},
diff --git a/registry/vk.xml b/registry/vk.xml
index 19ca6f7..98dc2c0 100644
--- a/registry/vk.xml
+++ b/registry/vk.xml
@@ -71,6 +71,7 @@ branch of the member gitlab server.
<tag name="VALVE" author="Valve Corporation" contact="Pierre-Loup Griffais @plagman, Joshua Ashton @Joshua-Ashton, Hans-Kristian Arntzen @HansKristian-Work"/>
<tag name="QNX" author="BlackBerry Limited" contact="Mike Gorchak @mgorchak-blackberry"/>
<tag name="JUICE" author="Juice Technologies, Inc." contact="David McCloskey @damcclos, Dean Beeler @canadacow"/>
+ <tag name="FB" author="Facebook, Inc" contact="Artem Bolgar @artyom17"/>
</tags>
<types comment="Vulkan type definitions">
@@ -154,7 +155,7 @@ branch of the member gitlab server.
<type category="define" requires="VK_MAKE_API_VERSION">// Vulkan 1.2 version number
#define <name>VK_API_VERSION_1_2</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 2, 0)// Patch version should always be set to 0</type>
<type category="define">// Version of this file
-#define <name>VK_HEADER_VERSION</name> 179</type>
+#define <name>VK_HEADER_VERSION</name> 180</type>
<type category="define" requires="VK_HEADER_VERSION">// Complete version of this file
#define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 2, VK_HEADER_VERSION)</type>
@@ -3302,6 +3303,17 @@ typedef void <name>CAMetalLayer</name>;
<member optional="true">const <type>void</type>* <name>pNext</name></member>
<member><type>VkQueueGlobalPriorityEXT</type> <name>globalPriority</name></member>
</type>
+ <type category="struct" name="VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT" structextends="VkPhysicalDeviceFeatures2">
+ <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+ <member noautovalidity="true"><type>void</type>* <name>pNext</name></member>
+ <member><type>VkBool32</type> <name>globalPriorityQuery</name></member>
+ </type>
+ <type category="struct" name="VkQueueFamilyGlobalPriorityPropertiesEXT" structextends="VkQueueFamilyProperties2">
+ <member values="VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+ <member optional="true"><type>void</type>* <name>pNext</name></member>
+ <member><type>uint32_t</type> <name>priorityCount</name></member>
+ <member><type>VkQueueGlobalPriorityEXT</type> <name>priorities</name>[<enum>VK_MAX_GLOBAL_PRIORITY_SIZE_EXT</enum>]</member>
+ </type>
<type category="struct" name="VkDebugUtilsObjectNameInfoEXT">
<member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member>
@@ -5053,6 +5065,11 @@ typedef void <name>CAMetalLayer</name>;
<member><type>void</type>* <name>pNext</name></member>
<member><type>VkBool32</type> <name>shaderZeroInitializeWorkgroupMemory</name></member>
</type>
+ <type category="struct" name="VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+ <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+ <member><type>void</type>* <name>pNext</name></member>
+ <member><type>VkBool32</type> <name>shaderSubgroupUniformControlFlow</name></member>
+ </type>
<type category="struct" name="VkPhysicalDeviceRobustness2FeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
@@ -5908,6 +5925,7 @@ typedef void <name>CAMetalLayer</name>;
<enum name="VK_MAX_DRIVER_INFO_SIZE_KHR" alias="VK_MAX_DRIVER_INFO_SIZE"/>
<enum type="uint32_t" value="(~0U)" name="VK_SHADER_UNUSED_KHR"/>
<enum name="VK_SHADER_UNUSED_NV" alias="VK_SHADER_UNUSED_KHR"/>
+ <enum type="uint32_t" value="16" name="VK_MAX_GLOBAL_PRIORITY_SIZE_EXT"/>
</enums>
<comment>
@@ -6803,6 +6821,7 @@ typedef void <name>CAMetalLayer</name>;
<enum value="4" name="VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT"/>
<enum value="5" name="VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT"/>
<enum value="6" name="VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT"/>
+ <enum value="7" name="VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT"/>
</enums>
<enums name="VkSubgroupFeatureFlagBits" type="bitmask">
<enum bitpos="0" name="VK_SUBGROUP_FEATURE_BASIC_BIT" comment="Basic subgroup operations"/>
@@ -8153,7 +8172,7 @@ typedef void <name>CAMetalLayer</name>;
<param len="bindingCount" optional="false,true">const <type>VkBuffer</type>* <name>pBuffers</name></param>
<param len="bindingCount">const <type>VkDeviceSize</type>* <name>pOffsets</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDraw</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>vertexCount</name></param>
@@ -8161,7 +8180,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>firstVertex</name></param>
<param><type>uint32_t</type> <name>firstInstance</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndexed</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>indexCount</name></param>
@@ -8170,7 +8189,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>int32_t</type> <name>vertexOffset</name></param>
<param><type>uint32_t</type> <name>firstInstance</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndirect</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -8178,7 +8197,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>drawCount</name></param>
<param><type>uint32_t</type> <name>stride</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndexedIndirect</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -8186,20 +8205,20 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>drawCount</name></param>
<param><type>uint32_t</type> <name>stride</name></param>
</command>
- <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
+ <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDispatch</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>groupCountX</name></param>
<param><type>uint32_t</type> <name>groupCountY</name></param>
<param><type>uint32_t</type> <name>groupCountZ</name></param>
</command>
- <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
+ <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDispatchIndirect</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
<param><type>VkDeviceSize</type> <name>offset</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyBuffer</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>srcBuffer</name></param>
@@ -8207,7 +8226,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>regionCount</name></param>
<param len="regionCount">const <type>VkBufferCopy</type>* <name>pRegions</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>srcImage</name></param>
@@ -8217,7 +8236,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>regionCount</name></param>
<param len="regionCount">const <type>VkImageCopy</type>* <name>pRegions</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdBlitImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>srcImage</name></param>
@@ -8228,7 +8247,7 @@ typedef void <name>CAMetalLayer</name>;
<param len="regionCount">const <type>VkImageBlit</type>* <name>pRegions</name></param>
<param><type>VkFilter</type> <name>filter</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyBufferToImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>srcBuffer</name></param>
@@ -8237,7 +8256,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>regionCount</name></param>
<param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyImageToBuffer</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>srcImage</name></param>
@@ -8246,7 +8265,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>regionCount</name></param>
<param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdUpdateBuffer</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>dstBuffer</name></param>
@@ -8254,7 +8273,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>VkDeviceSize</type> <name>dataSize</name></param>
<param len="dataSize">const <type>void</type>* <name>pData</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer" comment="transfer support is only available when VK_KHR_maintenance1 is enabled, as documented in valid usage language in the specification">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" comment="transfer support is only available when VK_KHR_maintenance1 is enabled, as documented in valid usage language in the specification">
<proto><type>void</type> <name>vkCmdFillBuffer</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>dstBuffer</name></param>
@@ -8262,7 +8281,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>VkDeviceSize</type> <name>size</name></param>
<param><type>uint32_t</type> <name>data</name></param>
</command>
- <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdClearColorImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>image</name></param>
@@ -8271,7 +8290,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>rangeCount</name></param>
<param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdClearDepthStencilImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>image</name></param>
@@ -8280,7 +8299,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>rangeCount</name></param>
<param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdClearAttachments</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>attachmentCount</name></param>
@@ -8288,7 +8307,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>rectCount</name></param>
<param len="rectCount">const <type>VkClearRect</type>* <name>pRects</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdResolveImage</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkImage</type> <name>srcImage</name></param>
@@ -8366,14 +8385,14 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>firstQuery</name></param>
<param><type>uint32_t</type> <name>queryCount</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdWriteTimestamp</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkPipelineStageFlagBits</type> <name>pipelineStage</name></param>
<param><type>VkQueryPool</type> <name>queryPool</name></param>
<param><type>uint32_t</type> <name>query</name></param>
</command>
- <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyQueryPoolResults</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkQueryPool</type> <name>queryPool</name></param>
@@ -8393,18 +8412,18 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>size</name></param>
<param len="size">const <type>void</type>* <name>pValues</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdBeginRenderPass</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkRenderPassBeginInfo</type>* <name>pRenderPassBegin</name></param>
<param><type>VkSubpassContents</type> <name>contents</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdNextSubpass</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkSubpassContents</type> <name>contents</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdEndRenderPass</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
</command>
@@ -9402,7 +9421,7 @@ typedef void <name>CAMetalLayer</name>;
<param optional="false">const <type>void</type>* <name>pHostPointer</name></param>
<param><type>VkMemoryHostPointerPropertiesEXT</type>* <name>pMemoryHostPointerProperties</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdWriteBufferMarkerAMD</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkPipelineStageFlagBits</type> <name>pipelineStage</name></param>
@@ -9418,21 +9437,21 @@ typedef void <name>CAMetalLayer</name>;
<param><type>VkRenderPass</type>* <name>pRenderPass</name></param>
</command>
<command name="vkCreateRenderPass2KHR" alias="vkCreateRenderPass2"/>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdBeginRenderPass2</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkRenderPassBeginInfo</type>* <name>pRenderPassBegin</name></param>
<param>const <type>VkSubpassBeginInfo</type>* <name>pSubpassBeginInfo</name></param>
</command>
<command name="vkCmdBeginRenderPass2KHR" alias="vkCmdBeginRenderPass2"/>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdNextSubpass2</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkSubpassBeginInfo</type>* <name>pSubpassBeginInfo</name></param>
<param>const <type>VkSubpassEndInfo</type>* <name>pSubpassEndInfo</name></param>
</command>
<command name="vkCmdNextSubpass2KHR" alias="vkCmdNextSubpass2"/>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary">
<proto><type>void</type> <name>vkCmdEndRenderPass2</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkSubpassEndInfo</type>* <name>pSubpassEndInfo</name></param>
@@ -9470,7 +9489,7 @@ typedef void <name>CAMetalLayer</name>;
<param>const <type>VkMemoryGetAndroidHardwareBufferInfoANDROID</type>* <name>pInfo</name></param>
<param>struct <type>AHardwareBuffer</type>** <name>pBuffer</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndirectCount</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -9482,7 +9501,7 @@ typedef void <name>CAMetalLayer</name>;
</command>
<command name="vkCmdDrawIndirectCountKHR" alias="vkCmdDrawIndirectCount"/>
<command name="vkCmdDrawIndirectCountAMD" alias="vkCmdDrawIndirectCount"/>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndexedIndirectCount</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -9545,7 +9564,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>query</name></param>
<param><type>uint32_t</type> <name>index</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawIndirectByteCountEXT</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>instanceCount</name></param>
@@ -9582,13 +9601,13 @@ typedef void <name>CAMetalLayer</name>;
<param optional="true"><type>uint32_t</type> <name>customSampleOrderCount</name></param>
<param len="customSampleOrderCount">const <type>VkCoarseSampleOrderCustomNV</type>* <name>pCustomSampleOrders</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawMeshTasksNV</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>taskCount</name></param>
<param><type>uint32_t</type> <name>firstTask</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawMeshTasksIndirectNV</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -9596,7 +9615,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>uint32_t</type> <name>drawCount</name></param>
<param><type>uint32_t</type> <name>stride</name></param>
</command>
- <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdDrawMeshTasksIndirectCountNV</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkBuffer</type> <name>buffer</name></param>
@@ -10192,32 +10211,32 @@ typedef void <name>CAMetalLayer</name>;
<param><type>VkPrivateDataSlotEXT</type> <name>privateDataSlot</name></param>
<param><type>uint64_t</type>* <name>pData</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyBuffer2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCopyBufferInfo2KHR</type>* <name>pCopyBufferInfo</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyImage2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCopyImageInfo2KHR</type>* <name>pCopyImageInfo</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdBlitImage2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkBlitImageInfo2KHR</type>* <name>pBlitImageInfo</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyBufferToImage2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCopyBufferToImageInfo2KHR</type>* <name>pCopyBufferToImageInfo</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCopyImageToBuffer2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCopyImageToBufferInfo2KHR</type>* <name>pCopyImageToBufferInfo</name></param>
</command>
- <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdResolveImage2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkResolveImageInfo2KHR</type>* <name>pResolveImageInfo</name></param>
@@ -10293,14 +10312,14 @@ typedef void <name>CAMetalLayer</name>;
<param len="submitCount">const <type>VkSubmitInfo2KHR</type>* <name>pSubmits</name></param>
<param optional="true" externsync="true"><type>VkFence</type> <name>fence</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdWriteTimestamp2KHR</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkPipelineStageFlags2KHR</type> <name>stage</name></param>
<param><type>VkQueryPool</type> <name>queryPool</name></param>
<param><type>uint32_t</type> <name>query</name></param>
</command>
- <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+ <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdWriteBufferMarker2AMD</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>VkPipelineStageFlags2KHR</type> <name>stage</name></param>
@@ -10424,7 +10443,7 @@ typedef void <name>CAMetalLayer</name>;
<param><type>VkCuFunctionNVX</type> <name>function</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
</command>
- <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="graphics">
+ <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
<proto><type>void</type> <name>vkCmdCuLaunchKernelNVX</name></proto>
<param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCuLaunchInfoNVX</type>* <name>pLaunchInfo</name></param>
@@ -12102,6 +12121,8 @@ typedef void <name>CAMetalLayer</name>;
<require>
<enum value="0" name="VK_AMD_EXTENSION_45_SPEC_VERSION"/>
<enum value="&quot;VK_AMD_extension_45&quot;" name="VK_AMD_EXTENSION_45_EXTENSION_NAME"/>
+ <enum bitpos="21" extends="VkPipelineCreateFlagBits" name="VK_PIPELINE_CREATE_RESERVED_21_BIT_AMD"/>
+ <enum bitpos="22" extends="VkPipelineCreateFlagBits" name="VK_PIPELINE_CREATE_RESERVED_22_BIT_AMD"/>
</require>
</extension>
<extension name="VK_AMD_extension_46" number="46" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
@@ -14753,7 +14774,7 @@ typedef void <name>CAMetalLayer</name>;
</extension>
<extension name="VK_EXT_validation_features" number="248" type="instance" author="LUNARG" contact="Karl Schultz @karl-lunarg" specialuse="debugging" supported="vulkan">
<require>
- <enum value="4" name="VK_EXT_VALIDATION_FEATURES_SPEC_VERSION"/>
+ <enum value="5" name="VK_EXT_VALIDATION_FEATURES_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_validation_features&quot;" name="VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME"/>
<enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT"/>
<type name="VkValidationFeaturesEXT"/>
@@ -15600,6 +15621,8 @@ typedef void <name>CAMetalLayer</name>;
<require>
<enum value="0" name="VK_AMD_EXTENSION_319_SPEC_VERSION"/>
<enum value="&quot;VK_AMD_extension_319&quot;" name="VK_AMD_EXTENSION_319_EXTENSION_NAME"/>
+ <enum bitpos="3" extends="VkDescriptorSetLayoutCreateFlagBits" name="VK_DESCRIPTOR_SET_LAYOUT_CREATE_RESERVED_3_BIT_AMD"/>
+ <enum bitpos="0" extends="VkPipelineLayoutCreateFlagBits" name="VK_PIPELINE_LAYOUT_CREATE_RESERVED_0_BIT_AMD"/>
</require>
</extension>
<extension name="VK_AMD_extension_320" number="320" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
@@ -15612,6 +15635,8 @@ typedef void <name>CAMetalLayer</name>;
<require>
<enum value="0" name="VK_AMD_EXTENSION_321_SPEC_VERSION"/>
<enum value="&quot;VK_AMD_extension_321&quot;" name="VK_AMD_EXTENSION_321_EXTENSION_NAME"/>
+ <enum bitpos="23" extends="VkPipelineCreateFlagBits" name="VK_PIPELINE_CREATE_RESERVED_23_BIT_AMD"/>
+ <enum bitpos="10" extends="VkPipelineCreateFlagBits" name="VK_PIPELINE_CREATE_RESERVED_10_BIT_AMD"/>
</require>
</extension>
<extension name="VK_AMD_extension_322" number="322" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
@@ -15626,10 +15651,12 @@ typedef void <name>CAMetalLayer</name>;
<enum value="&quot;VK_AMD_extension_323&quot;" name="VK_AMD_EXTENSION_323_EXTENSION_NAME"/>
</require>
</extension>
- <extension name="VK_KHR_extension_324" number="324" author="KHR" contact="Alan Baker @alan-baker" supported="disabled">
+ <extension name="VK_KHR_shader_subgroup_uniform_control_flow" number="324" type="device" requiresCore="1.1" author="KHR" contact="Alan Baker @alan-baker" supported="vulkan">
<require>
- <enum value="0" name="VK_KHR_EXTENSION_324_SPEC_VERSION"/>
- <enum value="&quot;VK_KHR_extension_324&quot;" name="VK_KHR_EXTENSION_324_EXTENSION_NAME"/>
+ <enum value="1" name="VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION"/>
+ <enum value="&quot;VK_KHR_shader_subgroup_uniform_control_flow&quot;" name="VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME"/>
+ <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR"/>
+ <type name="VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR"/>
</require>
</extension>
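The newly registered VK_KHR_shader_subgroup_uniform_control_flow extension exposes a single feature bit. Like other feature structs, it is queried by chaining it into VkPhysicalDeviceFeatures2; since the extension requires core 1.1, the non-KHR entry point is available. A sketch with a placeholder `physicalDevice` handle:

    /* Check whether the device supports the subgroup uniform control flow
     * SPIR-V execution mode. */
    VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR sucfFeatures = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &sucfFeatures,
    };
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
    if (sucfFeatures.shaderSubgroupUniformControlFlow) {
        /* shaders may declare the SubgroupUniformControlFlowKHR execution mode */
    }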
<extension name="VK_KHR_extension_325" number="325" author="KHR" contact="Ralph Potter gitlab:@r_potter" supported="disabled">
@@ -16137,10 +16164,15 @@ typedef void <name>CAMetalLayer</name>;
<enum value="&quot;VK_EXT_extension_388&quot;" name="VK_EXT_EXTENSION_388_EXTENSION_NAME"/>
</require>
</extension>
- <extension name="VK_EXT_extension_389" number="389" author="EXT" contact="Yiwei Zhang @zhangyiwei" supported="disabled">
+ <extension name="VK_EXT_global_priority_query" number="389" type="device" requires="VK_EXT_global_priority,VK_KHR_get_physical_device_properties2" author="EXT" contact="Yiwei Zhang @zhangyiwei" supported="vulkan">
<require>
- <enum value="0" name="VK_EXT_EXTENSION_389_SPEC_VERSION"/>
- <enum value="&quot;VK_EXT_extension_389&quot;" name="VK_EXT_EXTENSION_389_EXTENSION_NAME"/>
+ <enum value="1" name="VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION"/>
+ <enum value="&quot;VK_EXT_global_priority_query&quot;" name="VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME"/>
+ <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT"/>
+ <enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT"/>
+ <enum name="VK_MAX_GLOBAL_PRIORITY_SIZE_EXT"/>
+ <type name="VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT"/>
+ <type name="VkQueueFamilyGlobalPriorityPropertiesEXT"/>
</require>
</extension>
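VK_EXT_global_priority_query adds a feature struct plus a per-queue-family properties struct; the priorities a queue family supports are retrieved by chaining VkQueueFamilyGlobalPriorityPropertiesEXT into vkGetPhysicalDeviceQueueFamilyProperties2. A sketch for the first queue family only, with `physicalDevice` as a placeholder (real code would first query the family count and size its arrays accordingly):

    /* Enumerate the global priorities supported by queue family 0. */
    VkQueueFamilyGlobalPriorityPropertiesEXT priorityProps = {
        .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
    };
    VkQueueFamilyProperties2 familyProps = {
        .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
        .pNext = &priorityProps,
    };
    uint32_t count = 1;
    vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, &count, &familyProps);
    for (uint32_t i = 0; i < priorityProps.priorityCount; ++i) {
        /* priorityProps.priorities[i] holds a VkQueueGlobalPriorityEXT value */
    }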
<extension name="VK_EXT_extension_390" number="390" author="EXT" contact="Joshua Ashton @Joshua-Ashton" supported="disabled">
@@ -16215,6 +16247,66 @@ typedef void <name>CAMetalLayer</name>;
<enum value="&quot;VK_KHR_extension_401&quot;" name="VK_KHR_EXTENSION_401_EXTENSION_NAME"/>
</require>
</extension>
+ <extension name="VK_FB_extension_402" number="402" author="FB" contact="Artem Bolgar @artyom17" supported="disabled">
+ <require>
+ <enum value="0" name="VK_FB_EXTENSION_402_SPEC_VERSION"/>
+ <enum value="&quot;VK_FB_extension_402&quot;" name="VK_FB_EXTENSION_402_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_FB_extension_403" number="403" author="FB" contact="Artem Bolgar @artyom17" supported="disabled">
+ <require>
+ <enum value="0" name="VK_FB_EXTENSION_403_SPEC_VERSION"/>
+ <enum value="&quot;VK_FB_extension_403&quot;" name="VK_FB_EXTENSION_403_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_FB_extension_404" number="404" author="FB" contact="Artem Bolgar @artyom17" supported="disabled">
+ <require>
+ <enum value="0" name="VK_FB_EXTENSION_404_SPEC_VERSION"/>
+ <enum value="&quot;VK_FB_extension_404&quot;" name="VK_FB_EXTENSION_404_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_HUAWEI_extension_405" number="405" author="HUAWEI" contact="Hueilong Wang @wyvernathuawei" supported="disabled">
+ <require>
+ <enum value="0" name="VK_HUAWEI_EXTENSION_405_SPEC_VERSION"/>
+ <enum value="&quot;VK_HUAWEI_extension_405&quot;" name="VK_HUAWEI_EXTENSION_405_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_HUAWEI_extension_406" number="406" author="HUAWEI" contact="Hueilong Wang @wyvernathuawei" supported="disabled">
+ <require>
+ <enum value="0" name="VK_HUAWEI_EXTENSION_406_SPEC_VERSION"/>
+ <enum value="&quot;VK_HUAWEI_extension_406&quot;" name="VK_HUAWEI_EXTENSION_406_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_GGP_extension_407" number="407" author="GGP" contact="J.D. Rouan @jdrouan" supported="disabled">
+ <require>
+ <enum value="0" name="VK_GGP_EXTENSION_407_SPEC_VERSION"/>
+ <enum value="&quot;VK_GGP_extension_407&quot;" name="VK_GGP_EXTENSION_407_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_GGP_extension_408" number="408" author="GGP" contact="J.D. Rouan @jdrouan" supported="disabled">
+ <require>
+ <enum value="0" name="VK_GGP_EXTENSION_408_SPEC_VERSION"/>
+ <enum value="&quot;VK_GGP_extension_408&quot;" name="VK_GGP_EXTENSION_408_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_GGP_extension_409" number="409" author="GGP" contact="J.D. Rouan @jdrouan" supported="disabled">
+ <require>
+ <enum value="0" name="VK_GGP_EXTENSION_409_SPEC_VERSION"/>
+ <enum value="&quot;VK_GGP_extension_409&quot;" name="VK_GGP_EXTENSION_409_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_GGP_extension_410" number="410" author="GGP" contact="J.D. Rouan @jdrouan" supported="disabled">
+ <require>
+ <enum value="0" name="VK_GGP_EXTENSION_410_SPEC_VERSION"/>
+ <enum value="&quot;VK_GGP_extension_410&quot;" name="VK_GGP_EXTENSION_410_EXTENSION_NAME"/>
+ </require>
+ </extension>
+ <extension name="VK_GGP_extension_411" number="411" author="GGP" contact="J.D. Rouan @jdrouan" supported="disabled">
+ <require>
+ <enum value="0" name="VK_GGP_EXTENSION_411_SPEC_VERSION"/>
+ <enum value="&quot;VK_GGP_extension_411&quot;" name="VK_GGP_EXTENSION_411_EXTENSION_NAME"/>
+ </require>
+ </extension>
</extensions>
<spirvextensions comment="SPIR-V Extensions allowed in Vulkan and what is required to use it">
<spirvextension name="SPV_KHR_variable_pointers">