Diffstat (limited to 'include')
-rw-r--r--  include/vulkan/vulkan.hpp      11701
-rw-r--r--  include/vulkan/vulkan_core.h     265
2 files changed, 7061 insertions, 4905 deletions
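
Note: this change bumps VK_HEADER_VERSION to 170 and adds the VK_KHR_synchronization2 surface — the *2KHR entry points (vkCmdPipelineBarrier2KHR, vkCmdSetEvent2KHR, vkQueueSubmit2KHR, ...), the 64-bit PipelineStageFlagBits2KHR / AccessFlagBits2KHR enums, and the new barrier structs. As a rough illustration of how these additions fit together (not part of the diff itself; the command buffer, buffer handle, and chosen stage/access masks below are placeholder assumptions, and the extension must be enabled and its entry points loaded), a single buffer barrier recorded through the new API might look like this:

    // Sketch only: transfer-write -> fragment-shader-read barrier using the
    // synchronization2 entry point added in this diff.
    #include <vulkan/vulkan.h>

    void recordUploadBarrier( VkCommandBuffer cmd, VkBuffer buf, VkDeviceSize size )
    {
        VkBufferMemoryBarrier2KHR barrier{};
        barrier.sType               = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR;
        // Stage and access masks now live on each barrier instead of being
        // passed globally to vkCmdPipelineBarrier.
        barrier.srcStageMask        = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        barrier.srcAccessMask       = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        barrier.dstStageMask        = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
        barrier.dstAccessMask       = VK_ACCESS_2_SHADER_READ_BIT_KHR;
        barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        barrier.buffer              = buf;
        barrier.offset              = 0;
        barrier.size                = size;

        VkDependencyInfoKHR dep{};
        dep.sType                    = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
        dep.bufferMemoryBarrierCount = 1;
        dep.pBufferMemoryBarriers    = &barrier;

        // New entry point: one VkDependencyInfoKHR replaces the three separate
        // barrier arrays of vkCmdPipelineBarrier.
        vkCmdPipelineBarrier2KHR( cmd, &dep );
    }
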
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 08816ab..12520bd 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -94,7 +94,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
#endif
-static_assert( VK_HEADER_VERSION == 169 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 170 , "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -1742,6 +1742,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
+ void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
+ }
+
void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
@@ -1767,6 +1772,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdResetEvent( commandBuffer, event, stageMask );
}
+ void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
+ }
+
void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
@@ -1852,6 +1862,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetEvent( commandBuffer, event, stageMask );
}
+ void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
+ }
+
void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
@@ -1992,6 +2007,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
+ void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
+ }
+
void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
@@ -2002,6 +2022,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
}
+ void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
+ }
+
void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
@@ -2012,6 +2037,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
}
+ void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
+ }
+
VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCompileDeferredNV( device, pipeline, shader );
@@ -3217,6 +3247,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
}
+ void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
+ }
+
void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
@@ -3397,6 +3432,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
}
+ VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
+ }
+
VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueWaitIdle( queue );
@@ -3924,6 +3964,7 @@ namespace VULKAN_HPP_NAMESPACE
eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
+ eNoneKHR = VK_ACCESS_NONE_KHR,
eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR};
@@ -3960,6 +4001,85 @@ namespace VULKAN_HPP_NAMESPACE
case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
+ case AccessFlagBits::eNoneKHR : return "NoneKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ enum class AccessFlagBits2KHR : VkAccessFlags2KHR
+ {
+ e2None = VK_ACCESS_2_NONE_KHR,
+ e2IndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR,
+ e2IndexRead = VK_ACCESS_2_INDEX_READ_BIT_KHR,
+ e2VertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR,
+ e2UniformRead = VK_ACCESS_2_UNIFORM_READ_BIT_KHR,
+ e2InputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR,
+ e2ShaderRead = VK_ACCESS_2_SHADER_READ_BIT_KHR,
+ e2ShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT_KHR,
+ e2ColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR,
+ e2ColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR,
+ e2DepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR,
+ e2DepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR,
+ e2TransferRead = VK_ACCESS_2_TRANSFER_READ_BIT_KHR,
+ e2TransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR,
+ e2HostRead = VK_ACCESS_2_HOST_READ_BIT_KHR,
+ e2HostWrite = VK_ACCESS_2_HOST_WRITE_BIT_KHR,
+ e2MemoryRead = VK_ACCESS_2_MEMORY_READ_BIT_KHR,
+ e2MemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT_KHR,
+ e2ShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR,
+ e2ShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR,
+ e2ShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR,
+ e2TransformFeedbackWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+ e2TransformFeedbackCounterReadExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+ e2TransformFeedbackCounterWriteExt = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+ e2ConditionalRenderingReadExt = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+ e2CommandPreprocessReadNv = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+ e2CommandPreprocessWriteNv = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+ e2FragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+ e2AccelerationStructureRead = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+ e2AccelerationStructureWrite = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+ e2FragmentDensityMapReadExt = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+ e2ColorAttachmentReadNoncoherentExt = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+ e2AccelerationStructureReadNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+ e2AccelerationStructureWriteNv = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+ e2ShadingRateImageReadNv = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV};
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value )
+ {
+ switch ( value )
+ {
+ case AccessFlagBits2KHR::e2None : return "2None";
+ case AccessFlagBits2KHR::e2IndirectCommandRead : return "2IndirectCommandRead";
+ case AccessFlagBits2KHR::e2IndexRead : return "2IndexRead";
+ case AccessFlagBits2KHR::e2VertexAttributeRead : return "2VertexAttributeRead";
+ case AccessFlagBits2KHR::e2UniformRead : return "2UniformRead";
+ case AccessFlagBits2KHR::e2InputAttachmentRead : return "2InputAttachmentRead";
+ case AccessFlagBits2KHR::e2ShaderRead : return "2ShaderRead";
+ case AccessFlagBits2KHR::e2ShaderWrite : return "2ShaderWrite";
+ case AccessFlagBits2KHR::e2ColorAttachmentRead : return "2ColorAttachmentRead";
+ case AccessFlagBits2KHR::e2ColorAttachmentWrite : return "2ColorAttachmentWrite";
+ case AccessFlagBits2KHR::e2DepthStencilAttachmentRead : return "2DepthStencilAttachmentRead";
+ case AccessFlagBits2KHR::e2DepthStencilAttachmentWrite : return "2DepthStencilAttachmentWrite";
+ case AccessFlagBits2KHR::e2TransferRead : return "2TransferRead";
+ case AccessFlagBits2KHR::e2TransferWrite : return "2TransferWrite";
+ case AccessFlagBits2KHR::e2HostRead : return "2HostRead";
+ case AccessFlagBits2KHR::e2HostWrite : return "2HostWrite";
+ case AccessFlagBits2KHR::e2MemoryRead : return "2MemoryRead";
+ case AccessFlagBits2KHR::e2MemoryWrite : return "2MemoryWrite";
+ case AccessFlagBits2KHR::e2ShaderSampledRead : return "2ShaderSampledRead";
+ case AccessFlagBits2KHR::e2ShaderStorageRead : return "2ShaderStorageRead";
+ case AccessFlagBits2KHR::e2ShaderStorageWrite : return "2ShaderStorageWrite";
+ case AccessFlagBits2KHR::e2TransformFeedbackWriteExt : return "2TransformFeedbackWriteExt";
+ case AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt : return "2TransformFeedbackCounterReadExt";
+ case AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt : return "2TransformFeedbackCounterWriteExt";
+ case AccessFlagBits2KHR::e2ConditionalRenderingReadExt : return "2ConditionalRenderingReadExt";
+ case AccessFlagBits2KHR::e2CommandPreprocessReadNv : return "2CommandPreprocessReadNv";
+ case AccessFlagBits2KHR::e2CommandPreprocessWriteNv : return "2CommandPreprocessWriteNv";
+ case AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead : return "2FragmentShadingRateAttachmentRead";
+ case AccessFlagBits2KHR::e2AccelerationStructureRead : return "2AccelerationStructureRead";
+ case AccessFlagBits2KHR::e2AccelerationStructureWrite : return "2AccelerationStructureWrite";
+ case AccessFlagBits2KHR::e2FragmentDensityMapReadExt : return "2FragmentDensityMapReadExt";
+ case AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt : return "2ColorAttachmentReadNoncoherentExt";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5255,6 +5375,19 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class EventCreateFlagBits : VkEventCreateFlags
+ {
+ eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR};
+
+ VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case EventCreateFlagBits::eDeviceOnlyKHR : return "DeviceOnlyKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
{
eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
@@ -6354,6 +6487,8 @@ namespace VULKAN_HPP_NAMESPACE
eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+ eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
+ eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
@@ -6385,6 +6520,8 @@ namespace VULKAN_HPP_NAMESPACE
case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
+ case ImageLayout::eReadOnlyOptimalKHR : return "ReadOnlyOptimalKHR";
+ case ImageLayout::eAttachmentOptimalKHR : return "AttachmentOptimalKHR";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -7210,6 +7347,7 @@ namespace VULKAN_HPP_NAMESPACE
eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+ eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV};
@@ -7244,6 +7382,90 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
+ case PipelineStageFlagBits::eNoneKHR : return "NoneKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ enum class PipelineStageFlagBits2KHR : VkPipelineStageFlags2KHR
+ {
+ e2None = VK_PIPELINE_STAGE_2_NONE_KHR,
+ e2TopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
+ e2DrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR,
+ e2VertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR,
+ e2VertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR,
+ e2TessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR,
+ e2TessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR,
+ e2GeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR,
+ e2FragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
+ e2EarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR,
+ e2LateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
+ e2ColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
+ e2ComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
+ e2AllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR,
+ e2BottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
+ e2Host = VK_PIPELINE_STAGE_2_HOST_BIT_KHR,
+ e2AllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR,
+ e2AllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
+ e2Copy = VK_PIPELINE_STAGE_2_COPY_BIT_KHR,
+ e2Resolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR,
+ e2Blit = VK_PIPELINE_STAGE_2_BLIT_BIT_KHR,
+ e2Clear = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR,
+ e2IndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR,
+ e2VertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR,
+ e2PreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR,
+ e2TransformFeedbackExt = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+ e2ConditionalRenderingExt = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+ e2CommandPreprocessNv = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+ e2FragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ e2AccelerationStructureBuild = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+ e2RayTracingShader = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+ e2FragmentDensityProcessExt = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+ e2TaskShaderNv = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+ e2MeshShaderNv = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+ e2AccelerationStructureBuildNv = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ e2RayTracingShaderNv = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+ e2ShadingRateImageNv = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+ e2Transfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2KHR value )
+ {
+ switch ( value )
+ {
+ case PipelineStageFlagBits2KHR::e2None : return "2None";
+ case PipelineStageFlagBits2KHR::e2TopOfPipe : return "2TopOfPipe";
+ case PipelineStageFlagBits2KHR::e2DrawIndirect : return "2DrawIndirect";
+ case PipelineStageFlagBits2KHR::e2VertexInput : return "2VertexInput";
+ case PipelineStageFlagBits2KHR::e2VertexShader : return "2VertexShader";
+ case PipelineStageFlagBits2KHR::e2TessellationControlShader : return "2TessellationControlShader";
+ case PipelineStageFlagBits2KHR::e2TessellationEvaluationShader : return "2TessellationEvaluationShader";
+ case PipelineStageFlagBits2KHR::e2GeometryShader : return "2GeometryShader";
+ case PipelineStageFlagBits2KHR::e2FragmentShader : return "2FragmentShader";
+ case PipelineStageFlagBits2KHR::e2EarlyFragmentTests : return "2EarlyFragmentTests";
+ case PipelineStageFlagBits2KHR::e2LateFragmentTests : return "2LateFragmentTests";
+ case PipelineStageFlagBits2KHR::e2ColorAttachmentOutput : return "2ColorAttachmentOutput";
+ case PipelineStageFlagBits2KHR::e2ComputeShader : return "2ComputeShader";
+ case PipelineStageFlagBits2KHR::e2AllTransfer : return "2AllTransfer";
+ case PipelineStageFlagBits2KHR::e2BottomOfPipe : return "2BottomOfPipe";
+ case PipelineStageFlagBits2KHR::e2Host : return "2Host";
+ case PipelineStageFlagBits2KHR::e2AllGraphics : return "2AllGraphics";
+ case PipelineStageFlagBits2KHR::e2AllCommands : return "2AllCommands";
+ case PipelineStageFlagBits2KHR::e2Copy : return "2Copy";
+ case PipelineStageFlagBits2KHR::e2Resolve : return "2Resolve";
+ case PipelineStageFlagBits2KHR::e2Blit : return "2Blit";
+ case PipelineStageFlagBits2KHR::e2Clear : return "2Clear";
+ case PipelineStageFlagBits2KHR::e2IndexInput : return "2IndexInput";
+ case PipelineStageFlagBits2KHR::e2VertexAttributeInput : return "2VertexAttributeInput";
+ case PipelineStageFlagBits2KHR::e2PreRasterizationShaders : return "2PreRasterizationShaders";
+ case PipelineStageFlagBits2KHR::e2TransformFeedbackExt : return "2TransformFeedbackExt";
+ case PipelineStageFlagBits2KHR::e2ConditionalRenderingExt : return "2ConditionalRenderingExt";
+ case PipelineStageFlagBits2KHR::e2CommandPreprocessNv : return "2CommandPreprocessNv";
+ case PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment : return "2FragmentShadingRateAttachment";
+ case PipelineStageFlagBits2KHR::e2AccelerationStructureBuild : return "2AccelerationStructureBuild";
+ case PipelineStageFlagBits2KHR::e2RayTracingShader : return "2RayTracingShader";
+ case PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt : return "2FragmentDensityProcessExt";
+ case PipelineStageFlagBits2KHR::e2TaskShaderNv : return "2TaskShaderNv";
+ case PipelineStageFlagBits2KHR::e2MeshShaderNv : return "2MeshShaderNv";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -8636,6 +8858,16 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+ eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
+ eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
+ eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
+ eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
+ eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
+ eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
+ eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
+ ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
+ eQueueFamilyCheckpointProperties2Nv = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+ eCheckpointData2Nv = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
@@ -9307,6 +9539,16 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
+ case StructureType::eMemoryBarrier2KHR : return "MemoryBarrier2KHR";
+ case StructureType::eBufferMemoryBarrier2KHR : return "BufferMemoryBarrier2KHR";
+ case StructureType::eImageMemoryBarrier2KHR : return "ImageMemoryBarrier2KHR";
+ case StructureType::eDependencyInfoKHR : return "DependencyInfoKHR";
+ case StructureType::eSubmitInfo2KHR : return "SubmitInfo2KHR";
+ case StructureType::eSemaphoreSubmitInfoKHR : return "SemaphoreSubmitInfoKHR";
+ case StructureType::eCommandBufferSubmitInfoKHR : return "CommandBufferSubmitInfoKHR";
+ case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR : return "PhysicalDeviceSynchronization2FeaturesKHR";
+ case StructureType::eQueueFamilyCheckpointProperties2Nv : return "QueueFamilyCheckpointProperties2Nv";
+ case StructureType::eCheckpointData2Nv : return "CheckpointData2Nv";
case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR : return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR";
case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV : return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV : return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
@@ -9366,6 +9608,19 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class SubmitFlagBitsKHR : VkSubmitFlagsKHR
+ {
+ eProtected = VK_SUBMIT_PROTECTED_BIT_KHR};
+
+ VULKAN_HPP_INLINE std::string to_string( SubmitFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case SubmitFlagBitsKHR::eProtected : return "Protected";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
enum class SubpassContents
{
eInline = VK_SUBPASS_CONTENTS_INLINE,
@@ -9794,7 +10049,8 @@ VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) |
VkFlags(AccessFlagBits::eShadingRateImageReadNV) |
VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) |
VkFlags(AccessFlagBits::eCommandPreprocessReadNV) |
-VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
+VkFlags(AccessFlagBits::eCommandPreprocessWriteNV) |
+VkFlags(AccessFlagBits::eNoneKHR)
};
};
@@ -9856,6 +10112,108 @@ VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
}
+ using AccessFlags2KHR = Flags<AccessFlagBits2KHR>;
+
+ template <> struct FlagTraits<AccessFlagBits2KHR>
+ {
+ enum : VkFlags64
+ {
+ allFlags = VkFlags64(AccessFlagBits2KHR::e2None) |
+VkFlags64(AccessFlagBits2KHR::e2IndirectCommandRead) |
+VkFlags64(AccessFlagBits2KHR::e2IndexRead) |
+VkFlags64(AccessFlagBits2KHR::e2VertexAttributeRead) |
+VkFlags64(AccessFlagBits2KHR::e2UniformRead) |
+VkFlags64(AccessFlagBits2KHR::e2InputAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderWrite) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentWrite) |
+VkFlags64(AccessFlagBits2KHR::e2DepthStencilAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2DepthStencilAttachmentWrite) |
+VkFlags64(AccessFlagBits2KHR::e2TransferRead) |
+VkFlags64(AccessFlagBits2KHR::e2TransferWrite) |
+VkFlags64(AccessFlagBits2KHR::e2HostRead) |
+VkFlags64(AccessFlagBits2KHR::e2HostWrite) |
+VkFlags64(AccessFlagBits2KHR::e2MemoryRead) |
+VkFlags64(AccessFlagBits2KHR::e2MemoryWrite) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderSampledRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderStorageRead) |
+VkFlags64(AccessFlagBits2KHR::e2ShaderStorageWrite) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackWriteExt) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt) |
+VkFlags64(AccessFlagBits2KHR::e2ConditionalRenderingReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2CommandPreprocessReadNv) |
+VkFlags64(AccessFlagBits2KHR::e2CommandPreprocessWriteNv) |
+VkFlags64(AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead) |
+VkFlags64(AccessFlagBits2KHR::e2AccelerationStructureRead) |
+VkFlags64(AccessFlagBits2KHR::e2AccelerationStructureWrite) |
+VkFlags64(AccessFlagBits2KHR::e2FragmentDensityMapReadExt) |
+VkFlags64(AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator|( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return AccessFlags2KHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator&( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return AccessFlags2KHR( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator^( AccessFlagBits2KHR bit0, AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return AccessFlags2KHR( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator~( AccessFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( AccessFlags2KHR( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlags2KHR value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & AccessFlagBits2KHR::e2IndirectCommandRead ) result += "2IndirectCommandRead | ";
+ if ( value & AccessFlagBits2KHR::e2IndexRead ) result += "2IndexRead | ";
+ if ( value & AccessFlagBits2KHR::e2VertexAttributeRead ) result += "2VertexAttributeRead | ";
+ if ( value & AccessFlagBits2KHR::e2UniformRead ) result += "2UniformRead | ";
+ if ( value & AccessFlagBits2KHR::e2InputAttachmentRead ) result += "2InputAttachmentRead | ";
+ if ( value & AccessFlagBits2KHR::e2ShaderRead ) result += "2ShaderRead | ";
+ if ( value & AccessFlagBits2KHR::e2ShaderWrite ) result += "2ShaderWrite | ";
+ if ( value & AccessFlagBits2KHR::e2ColorAttachmentRead ) result += "2ColorAttachmentRead | ";
+ if ( value & AccessFlagBits2KHR::e2ColorAttachmentWrite ) result += "2ColorAttachmentWrite | ";
+ if ( value & AccessFlagBits2KHR::e2DepthStencilAttachmentRead ) result += "2DepthStencilAttachmentRead | ";
+ if ( value & AccessFlagBits2KHR::e2DepthStencilAttachmentWrite ) result += "2DepthStencilAttachmentWrite | ";
+ if ( value & AccessFlagBits2KHR::e2TransferRead ) result += "2TransferRead | ";
+ if ( value & AccessFlagBits2KHR::e2TransferWrite ) result += "2TransferWrite | ";
+ if ( value & AccessFlagBits2KHR::e2HostRead ) result += "2HostRead | ";
+ if ( value & AccessFlagBits2KHR::e2HostWrite ) result += "2HostWrite | ";
+ if ( value & AccessFlagBits2KHR::e2MemoryRead ) result += "2MemoryRead | ";
+ if ( value & AccessFlagBits2KHR::e2MemoryWrite ) result += "2MemoryWrite | ";
+ if ( value & AccessFlagBits2KHR::e2ShaderSampledRead ) result += "2ShaderSampledRead | ";
+ if ( value & AccessFlagBits2KHR::e2ShaderStorageRead ) result += "2ShaderStorageRead | ";
+ if ( value & AccessFlagBits2KHR::e2ShaderStorageWrite ) result += "2ShaderStorageWrite | ";
+ if ( value & AccessFlagBits2KHR::e2TransformFeedbackWriteExt ) result += "2TransformFeedbackWriteExt | ";
+ if ( value & AccessFlagBits2KHR::e2TransformFeedbackCounterReadExt ) result += "2TransformFeedbackCounterReadExt | ";
+ if ( value & AccessFlagBits2KHR::e2TransformFeedbackCounterWriteExt ) result += "2TransformFeedbackCounterWriteExt | ";
+ if ( value & AccessFlagBits2KHR::e2ConditionalRenderingReadExt ) result += "2ConditionalRenderingReadExt | ";
+ if ( value & AccessFlagBits2KHR::e2CommandPreprocessReadNv ) result += "2CommandPreprocessReadNv | ";
+ if ( value & AccessFlagBits2KHR::e2CommandPreprocessWriteNv ) result += "2CommandPreprocessWriteNv | ";
+ if ( value & AccessFlagBits2KHR::e2FragmentShadingRateAttachmentRead ) result += "2FragmentShadingRateAttachmentRead | ";
+ if ( value & AccessFlagBits2KHR::e2AccelerationStructureRead ) result += "2AccelerationStructureRead | ";
+ if ( value & AccessFlagBits2KHR::e2AccelerationStructureWrite ) result += "2AccelerationStructureWrite | ";
+ if ( value & AccessFlagBits2KHR::e2FragmentDensityMapReadExt ) result += "2FragmentDensityMapReadExt | ";
+ if ( value & AccessFlagBits2KHR::e2ColorAttachmentReadNoncoherentExt ) result += "2ColorAttachmentReadNoncoherentExt | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
+
using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
@@ -11126,20 +11484,45 @@ VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
return "{}";
}
- enum class EventCreateFlagBits : VkFlags
- {};
- VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
+ using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+ template <> struct FlagTraits<EventCreateFlagBits>
{
- return "(void)";
+ enum : VkFlags
+ {
+ allFlags = VkFlags(EventCreateFlagBits::eDeviceOnlyKHR)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return EventCreateFlags( bit0 ) | bit1;
}
- using EventCreateFlags = Flags<EventCreateFlagBits>;
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return EventCreateFlags( bit0 ) & bit1;
+ }
- VULKAN_HPP_INLINE std::string to_string( EventCreateFlags )
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
+ return EventCreateFlags( bit0 ) ^ bit1;
+ }
- return "{}";
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( EventCreateFlags( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & EventCreateFlagBits::eDeviceOnlyKHR ) result += "DeviceOnlyKHR | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
}
@@ -12989,7 +13372,8 @@ VkFlags(PipelineStageFlagBits::eShadingRateImageNV) |
VkFlags(PipelineStageFlagBits::eTaskShaderNV) |
VkFlags(PipelineStageFlagBits::eMeshShaderNV) |
VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) |
-VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
+VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) |
+VkFlags(PipelineStageFlagBits::eNoneKHR)
};
};
@@ -13048,6 +13432,112 @@ VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
return "{ " + result.substr(0, result.size() - 3) + " }";
}
+
+ using PipelineStageFlags2KHR = Flags<PipelineStageFlagBits2KHR>;
+
+ template <> struct FlagTraits<PipelineStageFlagBits2KHR>
+ {
+ enum : VkFlags64
+ {
+ allFlags = VkFlags64(PipelineStageFlagBits2KHR::e2None) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TopOfPipe) |
+VkFlags64(PipelineStageFlagBits2KHR::e2DrawIndirect) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TessellationControlShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TessellationEvaluationShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2GeometryShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2EarlyFragmentTests) |
+VkFlags64(PipelineStageFlagBits2KHR::e2LateFragmentTests) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ColorAttachmentOutput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ComputeShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllTransfer) |
+VkFlags64(PipelineStageFlagBits2KHR::e2BottomOfPipe) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Host) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllGraphics) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AllCommands) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Copy) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Resolve) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Blit) |
+VkFlags64(PipelineStageFlagBits2KHR::e2Clear) |
+VkFlags64(PipelineStageFlagBits2KHR::e2IndexInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2VertexAttributeInput) |
+VkFlags64(PipelineStageFlagBits2KHR::e2PreRasterizationShaders) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TransformFeedbackExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2ConditionalRenderingExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2CommandPreprocessNv) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment) |
+VkFlags64(PipelineStageFlagBits2KHR::e2AccelerationStructureBuild) |
+VkFlags64(PipelineStageFlagBits2KHR::e2RayTracingShader) |
+VkFlags64(PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt) |
+VkFlags64(PipelineStageFlagBits2KHR::e2TaskShaderNv) |
+VkFlags64(PipelineStageFlagBits2KHR::e2MeshShaderNv)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator|( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineStageFlags2KHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator&( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineStageFlags2KHR( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator^( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PipelineStageFlags2KHR( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator~( PipelineStageFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( PipelineStageFlags2KHR( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2KHR value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & PipelineStageFlagBits2KHR::e2TopOfPipe ) result += "2TopOfPipe | ";
+ if ( value & PipelineStageFlagBits2KHR::e2DrawIndirect ) result += "2DrawIndirect | ";
+ if ( value & PipelineStageFlagBits2KHR::e2VertexInput ) result += "2VertexInput | ";
+ if ( value & PipelineStageFlagBits2KHR::e2VertexShader ) result += "2VertexShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2TessellationControlShader ) result += "2TessellationControlShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2TessellationEvaluationShader ) result += "2TessellationEvaluationShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2GeometryShader ) result += "2GeometryShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2FragmentShader ) result += "2FragmentShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2EarlyFragmentTests ) result += "2EarlyFragmentTests | ";
+ if ( value & PipelineStageFlagBits2KHR::e2LateFragmentTests ) result += "2LateFragmentTests | ";
+ if ( value & PipelineStageFlagBits2KHR::e2ColorAttachmentOutput ) result += "2ColorAttachmentOutput | ";
+ if ( value & PipelineStageFlagBits2KHR::e2ComputeShader ) result += "2ComputeShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2AllTransfer ) result += "2AllTransfer | ";
+ if ( value & PipelineStageFlagBits2KHR::e2BottomOfPipe ) result += "2BottomOfPipe | ";
+ if ( value & PipelineStageFlagBits2KHR::e2Host ) result += "2Host | ";
+ if ( value & PipelineStageFlagBits2KHR::e2AllGraphics ) result += "2AllGraphics | ";
+ if ( value & PipelineStageFlagBits2KHR::e2AllCommands ) result += "2AllCommands | ";
+ if ( value & PipelineStageFlagBits2KHR::e2Copy ) result += "2Copy | ";
+ if ( value & PipelineStageFlagBits2KHR::e2Resolve ) result += "2Resolve | ";
+ if ( value & PipelineStageFlagBits2KHR::e2Blit ) result += "2Blit | ";
+ if ( value & PipelineStageFlagBits2KHR::e2Clear ) result += "2Clear | ";
+ if ( value & PipelineStageFlagBits2KHR::e2IndexInput ) result += "2IndexInput | ";
+ if ( value & PipelineStageFlagBits2KHR::e2VertexAttributeInput ) result += "2VertexAttributeInput | ";
+ if ( value & PipelineStageFlagBits2KHR::e2PreRasterizationShaders ) result += "2PreRasterizationShaders | ";
+ if ( value & PipelineStageFlagBits2KHR::e2TransformFeedbackExt ) result += "2TransformFeedbackExt | ";
+ if ( value & PipelineStageFlagBits2KHR::e2ConditionalRenderingExt ) result += "2ConditionalRenderingExt | ";
+ if ( value & PipelineStageFlagBits2KHR::e2CommandPreprocessNv ) result += "2CommandPreprocessNv | ";
+ if ( value & PipelineStageFlagBits2KHR::e2FragmentShadingRateAttachment ) result += "2FragmentShadingRateAttachment | ";
+ if ( value & PipelineStageFlagBits2KHR::e2AccelerationStructureBuild ) result += "2AccelerationStructureBuild | ";
+ if ( value & PipelineStageFlagBits2KHR::e2RayTracingShader ) result += "2RayTracingShader | ";
+ if ( value & PipelineStageFlagBits2KHR::e2FragmentDensityProcessExt ) result += "2FragmentDensityProcessExt | ";
+ if ( value & PipelineStageFlagBits2KHR::e2TaskShaderNv ) result += "2TaskShaderNv | ";
+ if ( value & PipelineStageFlagBits2KHR::e2MeshShaderNv ) result += "2MeshShaderNv | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
enum class PipelineTessellationStateCreateFlagBits : VkFlags
{};
@@ -13910,6 +14400,47 @@ VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
}
+ using SubmitFlagsKHR = Flags<SubmitFlagBitsKHR>;
+
+ template <> struct FlagTraits<SubmitFlagBitsKHR>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags(SubmitFlagBitsKHR::eProtected)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator|( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return SubmitFlagsKHR( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator&( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return SubmitFlagsKHR( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator^( SubmitFlagBitsKHR bit0, SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return SubmitFlagsKHR( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator~( SubmitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( SubmitFlagsKHR( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubmitFlagsKHR value )
+ {
+
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & SubmitFlagBitsKHR::eProtected ) result += "Protected | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
+
using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
template <> struct FlagTraits<SubpassDescriptionFlagBits>
@@ -23004,6 +23535,152 @@ namespace VULKAN_HPP_NAMESPACE
using Type = BufferMemoryBarrier;
};
+ struct BufferMemoryBarrier2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferMemoryBarrier2KHR( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : BufferMemoryBarrier2KHR( *reinterpret_cast<BufferMemoryBarrier2KHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2KHR & operator=( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BufferMemoryBarrier2KHR & operator=( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR const *>( &rhs );
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcStageMask = srcStageMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ BufferMemoryBarrier2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+
+ operator VkBufferMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBufferMemoryBarrier2KHR*>( this );
+ }
+
+ operator VkBufferMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBufferMemoryBarrier2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferMemoryBarrier2KHR const& ) const = default;
+#else
+ bool operator==( BufferMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( BufferMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+ };
+ static_assert( sizeof( BufferMemoryBarrier2KHR ) == sizeof( VkBufferMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eBufferMemoryBarrier2KHR>
+ {
+ using Type = BufferMemoryBarrier2KHR;
+ };
+
struct BufferMemoryRequirementsInfo2
{
static const bool allowDuplicate = false;
@@ -23366,6 +24043,78 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CalibratedTimestampInfoEXT;
};
+ struct CheckpointData2NV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2Nv;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {}, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
+ : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
+ return *this;
+ }
+
+
+ operator VkCheckpointData2NV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCheckpointData2NV*>( this );
+ }
+
+ operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCheckpointData2NV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CheckpointData2NV const& ) const = default;
+#else
+ bool operator==( CheckpointData2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( stage == rhs.stage )
+ && ( pCheckpointMarker == rhs.pCheckpointMarker );
+ }
+
+ bool operator!=( CheckpointData2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2Nv;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage = {};
+ void* pCheckpointMarker = {};
+
+ };
+ static_assert( sizeof( CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCheckpointData2Nv>
+ {
+ using Type = CheckpointData2NV;
+ };
+
struct CheckpointDataNV
{
static const bool allowDuplicate = false;
@@ -24734,73 +25483,80 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
};
- struct CommandPoolCreateInfo
+ struct ConditionalRenderingBeginInfoEXT
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
- : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
{}
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
+ ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
return *this;
}
- CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ buffer = buffer_;
return *this;
}
- CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
- queueFamilyIndex = queueFamilyIndex_;
+ offset = offset_;
+ return *this;
+ }
+
+ ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
return *this;
}
- operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+ return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
}
- operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+ return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+ auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
#else
- bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( queueFamilyIndex == rhs.queueFamilyIndex );
+ && ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( flags == rhs.flags );
}
- bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -24809,300 +25565,315 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
- uint32_t queueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
};
- static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+ struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
{
- using Type = CommandPoolCreateInfo;
+ using Type = ConditionalRenderingBeginInfoEXT;
};
- class ShaderModule
+ struct DebugUtilsLabelEXT
{
- public:
- using CType = VkShaderModule;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
- public:
- VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
- : m_shaderModule(VK_NULL_HANDLE)
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+ : pLabelName( pLabelName_ ), color( color_ )
{}
- VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_shaderModule(VK_NULL_HANDLE)
- {}
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
- : m_shaderModule( shaderModule )
+ DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- m_shaderModule = shaderModule;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
return *this;
}
-#endif
- ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- m_shaderModule = VK_NULL_HANDLE;
+ pNext = pNext_;
return *this;
}
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ShaderModule const& ) const = default;
-#else
- bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule == rhs.m_shaderModule;
+ pLabelName = pLabelName_;
+ return *this;
}
- bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule != rhs.m_shaderModule;
+ color = color_;
+ return *this;
}
- bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+
+ operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule < rhs.m_shaderModule;
+ return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
}
-#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule;
+ return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
}
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+#else
+ bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule != VK_NULL_HANDLE;
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pLabelName == rhs.pLabelName )
+ && ( color == rhs.color );
}
- bool operator!() const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_shaderModule == VK_NULL_HANDLE;
+ return !operator==( rhs );
}
+#endif
- private:
- VkShaderModule m_shaderModule;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
- {
- using type = VULKAN_HPP_NAMESPACE::ShaderModule;
- };
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
- {
- using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
- };
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+ const void* pNext = {};
+ const char* pLabelName = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
- {
- using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
};
-
+ static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+ struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
{
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ using Type = DebugUtilsLabelEXT;
};
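A minimal usage sketch for the DebugUtilsLabelEXT wrapper added above, illustrative only and not part of the generated header; it assumes a vk::CommandBuffer `cmd` recorded on a device created with the VK_EXT_debug_utils extension enabled and its entry points loaded:

  #include <vulkan/vulkan.hpp>

  void annotate( vk::CommandBuffer cmd )
  {
    // Build the label with the fluent setters shown above; the color is an RGBA hint for tooling.
    vk::DebugUtilsLabelEXT label;
    label.setPLabelName( "shadow pass" )
         .setColor( { { 1.0f, 0.0f, 0.0f, 1.0f } } );

    cmd.beginDebugUtilsLabelEXT( label );   // opens a named region visible in debuggers such as RenderDoc
    // ... record the commands that belong to this region ...
    cmd.endDebugUtilsLabelEXT();
  }
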
- struct SpecializationMapEntry
+ class QueryPool
{
+ public:
+ using CType = VkQueryPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
- : constantID( constantID_ ), offset( offset_ ), size( size_ )
+ public:
+ VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
+ : m_queryPool(VK_NULL_HANDLE)
{}
- VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
- : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
+ VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_queryPool(VK_NULL_HANDLE)
{}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+ : m_queryPool( queryPool )
+ {}
- SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+ m_queryPool = queryPool;
return *this;
}
+#endif
- SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+ QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- constantID = constantID_;
+ m_queryPool = VK_NULL_HANDLE;
return *this;
}
- SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueryPool const& ) const = default;
+#else
+ bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
- return *this;
+ return m_queryPool == rhs.m_queryPool;
}
- SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- size = size_;
- return *this;
+ return m_queryPool != rhs.m_queryPool;
}
-
- operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+ bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+ return m_queryPool < rhs.m_queryPool;
}
+#endif
- operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+ return m_queryPool;
}
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( SpecializationMapEntry const& ) const = default;
-#else
- bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return ( constantID == rhs.constantID )
- && ( offset == rhs.offset )
- && ( size == rhs.size );
+ return m_queryPool != VK_NULL_HANDLE;
}
- bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ return m_queryPool == VK_NULL_HANDLE;
}
-#endif
+ private:
+ VkQueryPool m_queryPool;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
+ {
+ using type = VULKAN_HPP_NAMESPACE::QueryPool;
+ };
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+ };
- public:
- uint32_t constantID = {};
- uint32_t offset = {};
- size_t size = {};
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::QueryPool;
};
- static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
- struct SpecializationInfo
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+ struct RenderPassBeginInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
- : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
{}
- VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
+ RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- template <typename T>
- SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
- : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+ RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+ : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
return *this;
}
- SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- mapEntryCount = mapEntryCount_;
+ pNext = pNext_;
return *this;
}
- SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
- pMapEntries = pMapEntries_;
+ renderPass = renderPass_;
return *this;
}
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
- mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
- pMapEntries = mapEntries_.data();
+ framebuffer = framebuffer_;
return *this;
}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
- dataSize = dataSize_;
+ renderArea = renderArea_;
return *this;
}
- SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
{
- pData = pData_;
+ clearValueCount = clearValueCount_;
+ return *this;
+ }
+
+ RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pClearValues = pClearValues_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- template <typename T>
- SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
{
- dataSize = data_.size() * sizeof(T);
- pData = data_.data();
+ clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+ pClearValues = clearValues_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSpecializationInfo*>( this );
+ return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
}
- operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSpecializationInfo*>( this );
+ return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( SpecializationInfo const& ) const = default;
+ auto operator<=>( RenderPassBeginInfo const& ) const = default;
#else
- bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( mapEntryCount == rhs.mapEntryCount )
- && ( pMapEntries == rhs.pMapEntries )
- && ( dataSize == rhs.dataSize )
- && ( pData == rhs.pData );
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( renderPass == rhs.renderPass )
+ && ( framebuffer == rhs.framebuffer )
+ && ( renderArea == rhs.renderArea )
+ && ( clearValueCount == rhs.clearValueCount )
+ && ( pClearValues == rhs.pClearValues );
}
- bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -25111,103 +25882,84 @@ namespace VULKAN_HPP_NAMESPACE
public:
- uint32_t mapEntryCount = {};
- const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
- size_t dataSize = {};
- const void* pData = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+ uint32_t clearValueCount = {};
+ const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
};
- static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- struct PipelineShaderStageCreateInfo
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+ {
+ using Type = RenderPassBeginInfo;
+ };
+
+ struct SubpassBeginInfo
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
- : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
+ : contents( contents_ )
{}
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
+ SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
return *this;
}
- PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
- {
- stage = stage_;
- return *this;
- }
-
- PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
- {
- module = module_;
- return *this;
- }
-
- PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
- {
- pName = pName_;
- return *this;
- }
-
- PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
{
- pSpecializationInfo = pSpecializationInfo_;
+ contents = contents_;
return *this;
}
- operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+ return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
}
- operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+ return *reinterpret_cast<VkSubpassBeginInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+ auto operator<=>( SubpassBeginInfo const& ) const = default;
#else
- bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( stage == rhs.stage )
- && ( module == rhs.module )
- && ( pName == rhs.pName )
- && ( pSpecializationInfo == rhs.pSpecializationInfo );
+ && ( contents == rhs.contents );
}
- bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -25216,23 +25968,20 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
- VULKAN_HPP_NAMESPACE::ShaderModule module = {};
- const char* pName = {};
- const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+ VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
};
- static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
+ struct CppType<StructureType, StructureType::eSubpassBeginInfo>
{
- using Type = PipelineShaderStageCreateInfo;
+ using Type = SubpassBeginInfo;
};
+ using SubpassBeginInfoKHR = SubpassBeginInfo;
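A minimal sketch tying the RenderPassBeginInfo and SubpassBeginInfo wrappers above together, illustrative only and not part of the generated header; `rp`, `fb` and `extent` are placeholder handles, and beginRenderPass2/endRenderPass2 assume Vulkan 1.2 or VK_KHR_create_renderpass2 (hence the SubpassBeginInfoKHR alias):

  #include <array>
  #include <vulkan/vulkan.hpp>

  void beginPass( vk::CommandBuffer cmd, vk::RenderPass rp, vk::Framebuffer fb, vk::Extent2D extent )
  {
    // One clear value per attachment: colour first, depth/stencil second.
    std::array<vk::ClearValue, 2> clears = {
      vk::ClearValue( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) ),
      vk::ClearValue( vk::ClearDepthStencilValue( 1.0f, 0 ) ) };

    vk::RenderPassBeginInfo beginInfo;
    beginInfo.setRenderPass( rp )
             .setFramebuffer( fb )
             .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
             .setClearValues( clears );   // the ArrayProxy setter fills clearValueCount and pClearValues

    cmd.beginRenderPass2( beginInfo, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
    // ... draw calls ...
    cmd.endRenderPass2( vk::SubpassEndInfo() );
  }
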
class PipelineLayout
{
@@ -25334,390 +26083,378 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- class Pipeline
+ class DescriptorSet
{
public:
- using CType = VkPipeline;
+ using CType = VkDescriptorSet;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
public:
- VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
- : m_pipeline(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet(VK_NULL_HANDLE)
{}
- VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_pipeline(VK_NULL_HANDLE)
+ VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet(VK_NULL_HANDLE)
{}
- VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
- : m_pipeline( pipeline )
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorSet( descriptorSet )
{}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+ DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
{
- m_pipeline = pipeline;
+ m_descriptorSet = descriptorSet;
return *this;
}
#endif
- Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- m_pipeline = VK_NULL_HANDLE;
+ m_descriptorSet = VK_NULL_HANDLE;
return *this;
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( Pipeline const& ) const = default;
+ auto operator<=>( DescriptorSet const& ) const = default;
#else
- bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline == rhs.m_pipeline;
+ return m_descriptorSet == rhs.m_descriptorSet;
}
- bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline != rhs.m_pipeline;
+ return m_descriptorSet != rhs.m_descriptorSet;
}
- bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline < rhs.m_pipeline;
+ return m_descriptorSet < rhs.m_descriptorSet;
}
#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline;
+ return m_descriptorSet;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline != VK_NULL_HANDLE;
+ return m_descriptorSet != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return m_pipeline == VK_NULL_HANDLE;
+ return m_descriptorSet == VK_NULL_HANDLE;
}
private:
- VkPipeline m_pipeline;
+ VkDescriptorSet m_descriptorSet;
};
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
{
- using type = VULKAN_HPP_NAMESPACE::Pipeline;
+ using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
{
- using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
{
- using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
};
template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- struct ComputePipelineCreateInfo
+ class Pipeline
{
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
- : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
- {}
+ public:
+ using CType = VkPipeline;
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
- ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
+ public:
+ VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
+ : m_pipeline(VK_NULL_HANDLE)
{}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_pipeline(VK_NULL_HANDLE)
+ {}
- ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
+ VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+ : m_pipeline( pipeline )
+ {}
- ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ m_pipeline = pipeline;
return *this;
}
+#endif
- ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+ Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- stage = stage_;
+ m_pipeline = VK_NULL_HANDLE;
return *this;
}
- ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Pipeline const& ) const = default;
+#else
+ bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- layout = layout_;
- return *this;
+ return m_pipeline == rhs.m_pipeline;
}
- ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- basePipelineHandle = basePipelineHandle_;
- return *this;
+ return m_pipeline != rhs.m_pipeline;
}
- ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- basePipelineIndex = basePipelineIndex_;
- return *this;
+ return m_pipeline < rhs.m_pipeline;
}
+#endif
-
- operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+ return m_pipeline;
}
- operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+ return m_pipeline != VK_NULL_HANDLE;
}
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
-#else
- bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( flags == rhs.flags )
- && ( stage == rhs.stage )
- && ( layout == rhs.layout )
- && ( basePipelineHandle == rhs.basePipelineHandle )
- && ( basePipelineIndex == rhs.basePipelineIndex );
+ return m_pipeline == VK_NULL_HANDLE;
}
- bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
+ private:
+ VkPipeline m_pipeline;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Pipeline;
+ };
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+ };
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
- int32_t basePipelineIndex = {};
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Pipeline;
};
- static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
template <>
- struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
{
- using Type = ComputePipelineCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- struct ConditionalRenderingBeginInfoEXT
+ class ImageView
{
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+ public:
+ using CType = VkImageView;
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
- {}
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ public:
+ VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
+ : m_imageView(VK_NULL_HANDLE)
+ {}
- ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
+ VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_imageView(VK_NULL_HANDLE)
{}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+ : m_imageView( imageView )
+ {}
- ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
+ m_imageView = imageView;
return *this;
}
+#endif
- ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ m_imageView = VK_NULL_HANDLE;
return *this;
}
- ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageView const& ) const = default;
+#else
+ bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- buffer = buffer_;
- return *this;
+ return m_imageView == rhs.m_imageView;
}
- ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- offset = offset_;
- return *this;
+ return m_imageView != rhs.m_imageView;
}
- ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
- return *this;
+ return m_imageView < rhs.m_imageView;
}
+#endif
-
- operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+ return m_imageView;
}
- operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+ return m_imageView != VK_NULL_HANDLE;
}
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
-#else
- bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( buffer == rhs.buffer )
- && ( offset == rhs.offset )
- && ( flags == rhs.flags );
+ return m_imageView == VK_NULL_HANDLE;
}
- bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
+ private:
+ VkImageView m_imageView;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
+ {
+ using type = VULKAN_HPP_NAMESPACE::ImageView;
+ };
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ImageView;
+ };
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ImageView;
};
- static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
template <>
- struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
{
- using Type = ConditionalRenderingBeginInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- struct ConformanceVersion
+ struct ImageBlit
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
- : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
{}
- VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
- : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
+ ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
return *this;
}
- ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- major = major_;
+ srcSubresource = srcSubresource_;
return *this;
}
- ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- minor = minor_;
+ srcOffsets = srcOffsets_;
return *this;
}
- ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- subminor = subminor_;
+ dstSubresource = dstSubresource_;
return *this;
}
- ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- patch = patch_;
+ dstOffsets = dstOffsets_;
return *this;
}
- operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkConformanceVersion*>( this );
+ return *reinterpret_cast<const VkImageBlit*>( this );
}
- operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkConformanceVersion*>( this );
+ return *reinterpret_cast<VkImageBlit*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ConformanceVersion const& ) const = default;
+ auto operator<=>( ImageBlit const& ) const = default;
#else
- bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( major == rhs.major )
- && ( minor == rhs.minor )
- && ( subminor == rhs.subminor )
- && ( patch == rhs.patch );
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffsets == rhs.srcOffsets )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffsets == rhs.dstOffsets );
}
- bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -25726,125 +26463,94 @@ namespace VULKAN_HPP_NAMESPACE
public:
- uint8_t major = {};
- uint8_t minor = {};
- uint8_t subminor = {};
- uint8_t patch = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
};
- static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
- using ConformanceVersionKHR = ConformanceVersion;
+ static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
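A minimal sketch for the ImageBlit wrapper above, illustrative only: it blits mip level 0 of a colour image into mip level 1 at half resolution; real code would clamp the half extents to at least 1 and insert the required layout transitions beforehand:

  #include <vulkan/vulkan.hpp>

  void blitMip0To1( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, int32_t width, int32_t height )
  {
    vk::ImageBlit blit;
    blit.setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
        .setSrcOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width, height, 1 ) } } )
        .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ) )
        .setDstOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width / 2, height / 2, 1 ) } } );

    cmd.blitImage( src, vk::ImageLayout::eTransferSrcOptimal,
                   dst, vk::ImageLayout::eTransferDstOptimal,
                   blit, vk::Filter::eLinear );   // linear filtering scales the source region down
  }
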
- struct CooperativeMatrixPropertiesNV
+ struct ImageSubresourceRange
{
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
+
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
- : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
+ VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
{}
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
+ ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
- return *this;
- }
-
- CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
- {
- MSize = MSize_;
- return *this;
- }
-
- CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
- {
- NSize = NSize_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- KSize = KSize_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
return *this;
}
- CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
- AType = AType_;
+ aspectMask = aspectMask_;
return *this;
}
- CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
{
- BType = BType_;
+ baseMipLevel = baseMipLevel_;
return *this;
}
- CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
{
- CType = CType_;
+ levelCount = levelCount_;
return *this;
}
- CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
- DType = DType_;
+ baseArrayLayer = baseArrayLayer_;
return *this;
}
- CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
- scope = scope_;
+ layerCount = layerCount_;
return *this;
}
- operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+ return *reinterpret_cast<const VkImageSubresourceRange*>( this );
}
- operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+ return *reinterpret_cast<VkImageSubresourceRange*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+ auto operator<=>( ImageSubresourceRange const& ) const = default;
#else
- bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( MSize == rhs.MSize )
- && ( NSize == rhs.NSize )
- && ( KSize == rhs.KSize )
- && ( AType == rhs.AType )
- && ( BType == rhs.BType )
- && ( CType == rhs.CType )
- && ( DType == rhs.DType )
- && ( scope == rhs.scope );
+ return ( aspectMask == rhs.aspectMask )
+ && ( baseMipLevel == rhs.baseMipLevel )
+ && ( levelCount == rhs.levelCount )
+ && ( baseArrayLayer == rhs.baseArrayLayer )
+ && ( layerCount == rhs.layerCount );
}
- bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -25853,26 +26559,15 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
- void* pNext = {};
- uint32_t MSize = {};
- uint32_t NSize = {};
- uint32_t KSize = {};
- VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
- VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
- VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
- VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
- VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t baseMipLevel = {};
+ uint32_t levelCount = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
};
- static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
- {
- using Type = CooperativeMatrixPropertiesNV;
- };
+ static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
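A minimal sketch for the ImageSubresourceRange wrapper above, illustrative only; it covers every mip level and array layer of a colour image and feeds the range into an image memory barrier. The access masks, layouts and stage masks are placeholder choices for a transfer-write to shader-read transition:

  #include <vulkan/vulkan.hpp>

  void toShaderRead( vk::CommandBuffer cmd, vk::Image image, uint32_t mipLevels, uint32_t arrayLayers )
  {
    vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor,
                                     0, mipLevels,       // baseMipLevel, levelCount
                                     0, arrayLayers );   // baseArrayLayer, layerCount

    vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
                                    vk::AccessFlagBits::eShaderRead,
                                    vk::ImageLayout::eTransferDstOptimal,
                                    vk::ImageLayout::eShaderReadOnlyOptimal,
                                    VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
                                    image, range );

    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {}, nullptr, nullptr, barrier );   // no memory or buffer barriers, one image barrier
  }
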
struct CopyAccelerationStructureInfoKHR
{
@@ -26302,66 +26997,191 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CopyBufferToImageInfo2KHR;
};
- struct CopyCommandTransformInfoQCOM
+ struct ImageCopy
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
+ return *this;
+ }
+
+ ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCopy*>( this );
+ }
+
+ operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCopy*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageCopy const& ) const = default;
+#else
+ bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+ };
+ static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+
+ struct ImageCopy2KHR
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
- : transform( transform_ )
+ VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
{}
- VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
+ ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageCopy2KHR( *reinterpret_cast<ImageCopy2KHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
return *this;
}
- CopyCommandTransformInfoQCOM & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- transform = transform_;
+ srcSubresource = srcSubresource_;
return *this;
}
+ ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
- operator VkCopyCommandTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
+ ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
+ dstSubresource = dstSubresource_;
+ return *this;
}
- operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCopy2KHR*>( this );
+ }
+
+ operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCopy2KHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CopyCommandTransformInfoQCOM const& ) const = default;
+ auto operator<=>( ImageCopy2KHR const& ) const = default;
#else
- bool operator==( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( transform == rhs.transform );
+ && ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
}
- bool operator!=( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -26370,222 +27190,2059 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
+ struct CppType<StructureType, StructureType::eImageCopy2KHR>
{
- using Type = CopyCommandTransformInfoQCOM;
+ using Type = ImageCopy2KHR;
};
- class DescriptorSet
+ struct CopyImageInfo2KHR
{
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyImageInfo2KHR( *reinterpret_cast<CopyImageInfo2KHR const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImage = dstImage_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstImageLayout = dstImageLayout_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageInfo2KHR*>( this );
+ }
+
+ operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyImageInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstImage == rhs.dstImage )
+ && ( dstImageLayout == rhs.dstImageLayout )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
public:
- using CType = VkDescriptorSet;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Image dstImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
+ {
+ using Type = CopyImageInfo2KHR;
+ };
+
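For orientation, a minimal sketch of how the new VK_KHR_copy_commands2 wrappers above might be recorded, assuming the default (enhanced-mode) configuration, the extension enabled on the device, and pre-existing handles cmd (vk::CommandBuffer), srcImage and dstImage (vk::Image) already in the layouts named below — this is editorial illustration, not part of the diff:

  // Hypothetical usage sketch: copy one 2D mip level using the "2KHR" copy structs.
  vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, /*mipLevel*/ 0, /*baseArrayLayer*/ 0, /*layerCount*/ 1 );
  vk::ImageCopy2KHR region;
  region.setSrcSubresource( layers )            // srcOffset/dstOffset stay at the default (0,0,0)
        .setDstSubresource( layers )
        .setExtent( vk::Extent3D( 256, 256, 1 ) );
  vk::CopyImageInfo2KHR copyInfo;
  copyInfo.setSrcImage( srcImage )              // assumed vk::Image handle
          .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
          .setDstImage( dstImage )              // assumed vk::Image handle
          .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
          .setRegions( region );                // fills regionCount / pRegions
  cmd.copyImage2KHR( copyInfo );                // wraps vkCmdCopyImage2KHR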
+ struct CopyImageToBufferInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyImageToBufferInfo2KHR( *reinterpret_cast<CopyImageToBufferInfo2KHR const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
+ : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImage = srcImage_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcImageLayout = srcImageLayout_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstBuffer = dstBuffer_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( this );
+ }
+
+ operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyImageToBufferInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyImageToBufferInfo2KHR const& ) const = default;
+#else
+ bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcImage == rhs.srcImage )
+ && ( srcImageLayout == rhs.srcImageLayout )
+ && ( dstBuffer == rhs.dstBuffer )
+ && ( regionCount == rhs.regionCount )
+ && ( pRegions == rhs.pRegions );
+ }
+
+ bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
public:
- VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
- : m_descriptorSet(VK_NULL_HANDLE)
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image srcImage = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+
+ };
+ static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
+ {
+ using Type = CopyImageToBufferInfo2KHR;
+ };
+
+ struct CopyMemoryToAccelerationStructureInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+ : src( src_ ), dst( dst_ ), mode( mode_ )
{}
- VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSet(VK_NULL_HANDLE)
+ CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSet( descriptorSet )
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+
+ operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ }
+
+ operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ }
+
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+ };
+ static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+ {
+ using Type = CopyMemoryToAccelerationStructureInfoKHR;
+ };
+
+ struct DebugMarkerMarkerInfoEXT
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+ : pMarkerName( pMarkerName_ ), color( color_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pMarkerName = pMarkerName_;
+ return *this;
+ }
+
+ DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+ {
+ color = color_;
+ return *this;
+ }
+
+
+ operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+ }
+
+ operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
+#else
+ bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pMarkerName == rhs.pMarkerName )
+ && ( color == rhs.color );
+ }
+
+ bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+ const void* pNext = {};
+ const char* pMarkerName = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
+ };
+ static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+ {
+ using Type = DebugMarkerMarkerInfoEXT;
+ };
+
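A hedged sketch of labelling a command-buffer region with the DebugMarkerMarkerInfoEXT wrapper above, assuming VK_EXT_debug_marker is enabled and cmd is a valid vk::CommandBuffer (again editorial, not part of the diff):

  // Hypothetical usage sketch: bracket a group of commands for tools such as RenderDoc.
  vk::DebugMarkerMarkerInfoEXT marker;
  marker.setPMarkerName( "shadow pass" )                  // label shown by the debugging tool
        .setColor( { 1.0f, 0.5f, 0.0f, 1.0f } );          // optional RGBA tint
  cmd.debugMarkerBeginEXT( marker );                      // wraps vkCmdDebugMarkerBeginEXT
  // ... record the commands that belong to this region ...
  cmd.debugMarkerEndEXT();                                // wraps vkCmdDebugMarkerEndEXT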
+ struct SubpassEndInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
+
+ {}
+
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
+ return *this;
+ }
+
+ SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+
+ operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassEndInfo*>( this );
+ }
+
+ operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassEndInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubpassEndInfo const& ) const = default;
+#else
+ bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext );
+ }
+
+ bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+ const void* pNext = {};
+
+ };
+ static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassEndInfo>
+ {
+ using Type = SubpassEndInfo;
+ };
+ using SubpassEndInfoKHR = SubpassEndInfo;
+
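SubpassEndInfo carries no payload beyond pNext; as a hedged one-liner, it is typically consumed like this (assuming the render pass was begun through the corresponding *2 entry point and cmd is a valid vk::CommandBuffer):

  // Hypothetical usage sketch, not part of this diff.
  cmd.endRenderPass2( vk::SubpassEndInfo{} );   // wraps vkCmdEndRenderPass2 / vkCmdEndRenderPass2KHR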
+ class IndirectCommandsLayoutNV
+ {
+ public:
+ using CType = VkIndirectCommandsLayoutNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+ : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
{}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
{
- m_descriptorSet = descriptorSet;
+ m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
return *this;
}
#endif
- DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
- m_descriptorSet = VK_NULL_HANDLE;
+ m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
return *this;
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DescriptorSet const& ) const = default;
+ auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
#else
- bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet == rhs.m_descriptorSet;
+ return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
}
- bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet != rhs.m_descriptorSet;
+ return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
}
- bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet < rhs.m_descriptorSet;
+ return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
}
#endif
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet;
+ return m_indirectCommandsLayoutNV;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet != VK_NULL_HANDLE;
+ return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return m_descriptorSet == VK_NULL_HANDLE;
+ return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
}
private:
- VkDescriptorSet m_descriptorSet;
+ VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
};
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
{
- using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
{
- using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
};
+
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
{
- using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
+ struct IndirectCommandsStreamNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+ return *this;
+ }
+
+ IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+
+ operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
+ }
+
+ operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
+#else
+ bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( buffer == rhs.buffer )
+ && ( offset == rhs.offset );
+ }
+
+ bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
+ };
+ static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+
+ struct GeneratedCommandsInfoNV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
+ : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
+ : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineBindPoint = pipelineBindPoint_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipeline = pipeline_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indirectCommandsLayout = indirectCommandsLayout_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ streamCount = streamCount_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pStreams = pStreams_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
+ {
+ streamCount = static_cast<uint32_t>( streams_.size() );
+ pStreams = streams_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCount = sequencesCount_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessBuffer = preprocessBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessOffset = preprocessOffset_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ preprocessSize = preprocessSize_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCountBuffer = sequencesCountBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesCountOffset = sequencesCountOffset_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesIndexBuffer = sequencesIndexBuffer_;
+ return *this;
+ }
+
+ GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sequencesIndexOffset = sequencesIndexOffset_;
+ return *this;
+ }
+
+
+ operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
+ }
+
+ operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
+#else
+ bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( pipelineBindPoint == rhs.pipelineBindPoint )
+ && ( pipeline == rhs.pipeline )
+ && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+ && ( streamCount == rhs.streamCount )
+ && ( pStreams == rhs.pStreams )
+ && ( sequencesCount == rhs.sequencesCount )
+ && ( preprocessBuffer == rhs.preprocessBuffer )
+ && ( preprocessOffset == rhs.preprocessOffset )
+ && ( preprocessSize == rhs.preprocessSize )
+ && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+ && ( sequencesCountOffset == rhs.sequencesCountOffset )
+ && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+ && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+ }
+
+ bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+ uint32_t streamCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
+ uint32_t sequencesCount = {};
+ VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+
+ };
+ static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
+ struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
{
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ using Type = GeneratedCommandsInfoNV;
};
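A hedged sketch of how the VK_NV_device_generated_commands wrappers above fit together; every handle here (cmd, pipeline, commandsLayout, tokenBuffer, preprocessBuffer) and the counts (sequenceCount, preprocessSize) are assumed to have been created or queried beforehand, e.g. the preprocess buffer sized via Device::getGeneratedCommandsMemoryRequirementsNV:

  // Hypothetical usage sketch, not part of this diff.
  vk::IndirectCommandsStreamNV stream( tokenBuffer, /*offset*/ 0 );      // assumed vk::Buffer holding command tokens
  vk::GeneratedCommandsInfoNV info;
  info.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
      .setPipeline( pipeline )                                           // assumed vk::Pipeline
      .setIndirectCommandsLayout( commandsLayout )                       // assumed vk::IndirectCommandsLayoutNV
      .setStreams( stream )                                              // fills streamCount / pStreams
      .setSequencesCount( sequenceCount )                                // assumed uint32_t
      .setPreprocessBuffer( preprocessBuffer )                           // assumed vk::Buffer
      .setPreprocessSize( preprocessSize );                              // assumed vk::DeviceSize
  cmd.executeGeneratedCommandsNV( /*isPreprocessed*/ VK_FALSE, info );   // wraps vkCmdExecuteGeneratedCommandsNV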
- struct CopyDescriptorSet
+ struct MemoryBarrier
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
- : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+ VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
{}
- VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
+ MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
return *this;
}
- CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- srcSet = srcSet_;
+ srcAccessMask = srcAccessMask_;
return *this;
}
- CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
- srcBinding = srcBinding_;
+ dstAccessMask = dstAccessMask_;
return *this;
}
- CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
{
- srcArrayElement = srcArrayElement_;
+ return *reinterpret_cast<const VkMemoryBarrier*>( this );
+ }
+
+ operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryBarrier*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryBarrier const& ) const = default;
+#else
+ bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask );
+ }
+
+ bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+
+ };
+ static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryBarrier>
+ {
+ using Type = MemoryBarrier;
+ };
+
+ struct ImageMemoryBarrier
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
return *this;
}
- CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ oldLayout = oldLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ newLayout = newLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+ ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+
+
+ operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+ }
+
+ operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageMemoryBarrier const& ) const = default;
+#else
+ bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( oldLayout == rhs.oldLayout )
+ && ( newLayout == rhs.newLayout )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( image == rhs.image )
+ && ( subresourceRange == rhs.subresourceRange );
+ }
+
+ bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+ };
+ static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageMemoryBarrier>
+ {
+ using Type = ImageMemoryBarrier;
+ };
+
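For contrast with the synchronization2 variants added below, a minimal sketch of the classic layout transition recorded through the original ImageMemoryBarrier wrapper; cmd (vk::CommandBuffer) and image (vk::Image) are assumed handles, and this is editorial illustration rather than diff content:

  // Hypothetical usage sketch: undefined -> transfer-dst transition before an upload.
  vk::ImageMemoryBarrier barrier;
  barrier.setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
         .setOldLayout( vk::ImageLayout::eUndefined )
         .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
         .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
         .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
         .setImage( image )                                              // assumed vk::Image
         .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
                       vk::PipelineStageFlagBits::eTransfer,
                       {}, nullptr, nullptr, barrier );                  // wraps vkCmdPipelineBarrier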
+ struct MemoryBarrier2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryBarrier2KHR( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MemoryBarrier2KHR( *reinterpret_cast<MemoryBarrier2KHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2KHR & operator=( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MemoryBarrier2KHR & operator=( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR const *>( &rhs );
+ return *this;
+ }
+
+ MemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcStageMask = srcStageMask_;
+ return *this;
+ }
+
+ MemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ MemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ MemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+
+ operator VkMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryBarrier2KHR*>( this );
+ }
+
+ operator VkMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryBarrier2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( MemoryBarrier2KHR const& ) const = default;
+#else
+ bool operator==( MemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( dstAccessMask == rhs.dstAccessMask );
+ }
+
+ bool operator!=( MemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
+
+ };
+ static_assert( sizeof( MemoryBarrier2KHR ) == sizeof( VkMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eMemoryBarrier2KHR>
+ {
+ using Type = MemoryBarrier2KHR;
+ };
+
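And a hedged sketch of the new synchronization2 path this header revision wires up, combining MemoryBarrier2KHR with the DependencyInfoKHR wrapper defined elsewhere in this header; VK_KHR_synchronization2 must be enabled, cmd is an assumed vk::CommandBuffer, and the exact bit names are as generated for this revision:

  // Hypothetical usage sketch, not part of this diff: global barrier between a
  // compute-shader write and a subsequent copy read, expressed with the 2KHR types.
  vk::MemoryBarrier2KHR barrier;
  barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eComputeShader )
         .setSrcAccessMask( vk::AccessFlagBits2KHR::eShaderWrite )
         .setDstStageMask( vk::PipelineStageFlagBits2KHR::eCopy )
         .setDstAccessMask( vk::AccessFlagBits2KHR::eTransferRead );
  vk::DependencyInfoKHR depInfo;
  depInfo.setMemoryBarriers( barrier );           // fills memoryBarrierCount / pMemoryBarriers
  cmd.pipelineBarrier2KHR( depInfo );             // wraps vkCmdPipelineBarrier2KHR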
+ struct ImageMemoryBarrier2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryBarrier2KHR( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageMemoryBarrier2KHR( *reinterpret_cast<ImageMemoryBarrier2KHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2KHR & operator=( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageMemoryBarrier2KHR & operator=( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR const *>( &rhs );
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcStageMask = srcStageMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAccessMask = srcAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstStageMask = dstStageMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAccessMask = dstAccessMask_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ oldLayout = oldLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ newLayout = newLayout_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcQueueFamilyIndex = srcQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstQueueFamilyIndex = dstQueueFamilyIndex_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+ ImageMemoryBarrier2KHR & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subresourceRange = subresourceRange_;
+ return *this;
+ }
+
+
+ operator VkImageMemoryBarrier2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageMemoryBarrier2KHR*>( this );
+ }
+
+ operator VkImageMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageMemoryBarrier2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageMemoryBarrier2KHR const& ) const = default;
+#else
+ bool operator==( ImageMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( srcStageMask == rhs.srcStageMask )
+ && ( srcAccessMask == rhs.srcAccessMask )
+ && ( dstStageMask == rhs.dstStageMask )
+ && ( dstAccessMask == rhs.dstAccessMask )
+ && ( oldLayout == rhs.oldLayout )
+ && ( newLayout == rhs.newLayout )
+ && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+ && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+ && ( image == rhs.image )
+ && ( subresourceRange == rhs.subresourceRange );
+ }
+
+ bool operator!=( ImageMemoryBarrier2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+ };
+ static_assert( sizeof( ImageMemoryBarrier2KHR ) == sizeof( VkImageMemoryBarrier2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageMemoryBarrier2KHR>
+ {
+ using Type = ImageMemoryBarrier2KHR;
+ };
+
+ struct DependencyInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DependencyInfoKHR(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers_ = {}) VULKAN_HPP_NOEXCEPT
+ : dependencyFlags( dependencyFlags_ ), memoryBarrierCount( memoryBarrierCount_ ), pMemoryBarriers( pMemoryBarriers_ ), bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ), pBufferMemoryBarriers( pBufferMemoryBarriers_ ), imageMemoryBarrierCount( imageMemoryBarrierCount_ ), pImageMemoryBarriers( pImageMemoryBarriers_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DependencyInfoKHR( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DependencyInfoKHR( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DependencyInfoKHR( *reinterpret_cast<DependencyInfoKHR const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DependencyInfoKHR( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const & memoryBarriers_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const & bufferMemoryBarriers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const & imageMemoryBarriers_ = {} )
+ : dependencyFlags( dependencyFlags_ ), memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) ), pMemoryBarriers( memoryBarriers_.data() ), bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) ), pBufferMemoryBarriers( bufferMemoryBarriers_.data() ), imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) ), pImageMemoryBarriers( imageMemoryBarriers_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfoKHR & operator=( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DependencyInfoKHR & operator=( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ DependencyInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ DependencyInfoKHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dependencyFlags = dependencyFlags_;
+ return *this;
+ }
+
+ DependencyInfoKHR & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryBarrierCount = memoryBarrierCount_;
+ return *this;
+ }
+
+ DependencyInfoKHR & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pMemoryBarriers = pMemoryBarriers_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DependencyInfoKHR & setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
+ pMemoryBarriers = memoryBarriers_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ DependencyInfoKHR & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
+ return *this;
+ }
+
+ DependencyInfoKHR & setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pBufferMemoryBarriers = pBufferMemoryBarriers_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DependencyInfoKHR & setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
+ pBufferMemoryBarriers = bufferMemoryBarriers_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ DependencyInfoKHR & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageMemoryBarrierCount = imageMemoryBarrierCount_;
+ return *this;
+ }
+
+ DependencyInfoKHR & setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pImageMemoryBarriers = pImageMemoryBarriers_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ DependencyInfoKHR & setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
+ pImageMemoryBarriers = imageMemoryBarriers_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkDependencyInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDependencyInfoKHR*>( this );
+ }
+
+ operator VkDependencyInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDependencyInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DependencyInfoKHR const& ) const = default;
+#else
+ bool operator==( DependencyInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( dependencyFlags == rhs.dependencyFlags )
+ && ( memoryBarrierCount == rhs.memoryBarrierCount )
+ && ( pMemoryBarriers == rhs.pMemoryBarriers )
+ && ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount )
+ && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers )
+ && ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount )
+ && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
+ }
+
+ bool operator!=( DependencyInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+ uint32_t memoryBarrierCount = {};
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR* pMemoryBarriers = {};
+ uint32_t bufferMemoryBarrierCount = {};
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR* pBufferMemoryBarriers = {};
+ uint32_t imageMemoryBarrierCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR* pImageMemoryBarriers = {};
+
+ };
+ static_assert( sizeof( DependencyInfoKHR ) == sizeof( VkDependencyInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DependencyInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eDependencyInfoKHR>
+ {
+ using Type = DependencyInfoKHR;
+ };
+
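The structs above are the C++ side of VK_KHR_synchronization2: MemoryBarrier2KHR / ImageMemoryBarrier2KHR carry the widened 64-bit stage and access masks, and DependencyInfoKHR bundles them for the new vkCmdPipelineBarrier2KHR entry point. A minimal usage sketch, assuming a valid vk::CommandBuffer cmd and vk::Image image, and assuming the CommandBuffer::pipelineBarrier2KHR wrapper and the PipelineStageFlagBits2KHR / AccessFlagBits2KHR enumerations declared elsewhere in this header:

    // Transition a color attachment so a fragment shader can sample it,
    // using the synchronization2 barrier structs added above.
    vk::ImageMemoryBarrier2KHR barrier{};
    barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput )
           .setSrcAccessMask( vk::AccessFlagBits2KHR::eColorAttachmentWrite )
           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eFragmentShader )
           .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderRead )
           .setOldLayout( vk::ImageLayout::eColorAttachmentOptimal )
           .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
           .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
           .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
           .setImage( image )
           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

    vk::DependencyInfoKHR dependencyInfo{};
    dependencyInfo.setImageMemoryBarriers( barrier );  // ArrayProxy setter fills count and pointer together

    cmd.pipelineBarrier2KHR( dependencyInfo );

The ArrayProxyNoTemporaries setters (setMemoryBarriers, setBufferMemoryBarriers, setImageMemoryBarriers) keep the count and pointer members consistent, which is the main ergonomic gain over filling the memoryBarrierCount / pMemoryBarriers pairs by hand.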
+ class Sampler
+ {
+ public:
+ using CType = VkSampler;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_sampler(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+ : m_sampler( sampler )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+ {
+ m_sampler = sampler;
+ return *this;
+ }
+#endif
+
+ Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_sampler = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Sampler const& ) const = default;
+#else
+ bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler == rhs.m_sampler;
+ }
+
+ bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler != rhs.m_sampler;
+ }
+
+ bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler < rhs.m_sampler;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_sampler == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkSampler m_sampler;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Sampler;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct DescriptorImageInfo
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+ : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sampler = sampler_;
+ return *this;
+ }
+
+ DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageView = imageView_;
+ return *this;
+ }
+
+ DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageLayout = imageLayout_;
+ return *this;
+ }
+
+
+ operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+ }
+
+ operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorImageInfo const& ) const = default;
+#else
+ bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sampler == rhs.sampler )
+ && ( imageView == rhs.imageView )
+ && ( imageLayout == rhs.imageLayout );
+ }
+
+ bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+ };
+ static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+ struct DescriptorBufferInfo
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ ), offset( offset_ ), range( range_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+ return *this;
+ }
+
+ DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+ {
+ range = range_;
+ return *this;
+ }
+
+
+ operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+ }
+
+ operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorBufferInfo const& ) const = default;
+#else
+ bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( buffer == rhs.buffer )
+ && ( offset == rhs.offset )
+ && ( range == rhs.range );
+ }
+
+ bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
+ };
+ static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+ class BufferView
+ {
+ public:
+ using CType = VkBufferView;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+
+ public:
+ VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+ : m_bufferView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_bufferView(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+ : m_bufferView( bufferView )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+ {
+ m_bufferView = bufferView;
+ return *this;
+ }
+#endif
+
+ BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_bufferView = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( BufferView const& ) const = default;
+#else
+ bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView == rhs.m_bufferView;
+ }
+
+ bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView != rhs.m_bufferView;
+ }
+
+ bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView < rhs.m_bufferView;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_bufferView == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkBufferView m_bufferView;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
+ {
+ using type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::BufferView;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ struct WriteDescriptorSet
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
+ : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
+ : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
+#else
+ if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
+ return *this;
+ }
+
+ WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
}
- CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
{
dstBinding = dstBinding_;
return *this;
}
- CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
dstArrayElement = dstArrayElement_;
return *this;
}
- CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
+ WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorType = descriptorType_;
+ return *this;
+ }
- operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+ pImageInfo = pImageInfo_;
+ return *this;
}
- operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+ descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
+ pImageInfo = imageInfo_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pBufferInfo = pBufferInfo_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
+ pBufferInfo = bufferInfo_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pTexelBufferView = pTexelBufferView_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
+ pTexelBufferView = texelBufferView_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+ }
+
+ operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkWriteDescriptorSet*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CopyDescriptorSet const& ) const = default;
+ auto operator<=>( WriteDescriptorSet const& ) const = default;
#else
- bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( srcSet == rhs.srcSet )
- && ( srcBinding == rhs.srcBinding )
- && ( srcArrayElement == rhs.srcArrayElement )
&& ( dstSet == rhs.dstSet )
&& ( dstBinding == rhs.dstBinding )
&& ( dstArrayElement == rhs.dstArrayElement )
- && ( descriptorCount == rhs.descriptorCount );
+ && ( descriptorCount == rhs.descriptorCount )
+ && ( descriptorType == rhs.descriptorType )
+ && ( pImageInfo == rhs.pImageInfo )
+ && ( pBufferInfo == rhs.pBufferInfo )
+ && ( pTexelBufferView == rhs.pTexelBufferView );
}
- bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -26594,103 +29251,402 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
- uint32_t srcBinding = {};
- uint32_t srcArrayElement = {};
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
uint32_t dstBinding = {};
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+ const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
};
- static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCopyDescriptorSet>
+ struct CppType<StructureType, StructureType::eWriteDescriptorSet>
{
- using Type = CopyDescriptorSet;
+ using Type = WriteDescriptorSet;
};
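WriteDescriptorSet gets the same ArrayProxy-based convenience setters (setImageInfo, setBufferInfo, setTexelBufferView), each of which also updates descriptorCount. A small sketch of pointing a uniform-buffer binding at a buffer, assuming a valid vk::Device device, vk::DescriptorSet descriptorSet and vk::Buffer uniformBuffer, and the enhanced-mode Device::updateDescriptorSets overload:

    // Describe the buffer backing binding 0 and write it into the descriptor set.
    vk::DescriptorBufferInfo bufferInfo{ uniformBuffer, 0, VK_WHOLE_SIZE };

    vk::WriteDescriptorSet write{};
    write.setDstSet( descriptorSet )
         .setDstBinding( 0 )
         .setDescriptorType( vk::DescriptorType::eUniformBuffer )
         .setBufferInfo( bufferInfo );  // also sets descriptorCount = 1

    device.updateDescriptorSets( write, nullptr );  // no CopyDescriptorSet entries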
- struct ImageCopy2KHR
+ class DescriptorUpdateTemplate
+ {
+ public:
+ using CType = VkDescriptorUpdateTemplate;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+ public:
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+ : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+ return *this;
+ }
+#endif
+
+ DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
+#else
+ bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+ }
+
+ bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
+ {
+ using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+ using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+ class Event
+ {
+ public:
+ using CType = VkEvent;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+ public:
+ VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_event(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+ : m_event( event )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+ {
+ m_event = event;
+ return *this;
+ }
+#endif
+
+ Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_event = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Event const& ) const = default;
+#else
+ bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event == rhs.m_event;
+ }
+
+ bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event != rhs.m_event;
+ }
+
+ bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event < rhs.m_event;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_event == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkEvent m_event;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
+ {
+ using type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::Event;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
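The Event handle pairs with the new vkCmdSetEvent2KHR / vkCmdWaitEvents2KHR entry points, which take a DependencyInfoKHR instead of separate stage masks and barrier arrays. A sketch of a split barrier under that model, assuming a valid vk::CommandBuffer cmd and vk::Event event, and assuming the CommandBuffer::setEvent2KHR / waitEvents2KHR wrappers and the 2KHR flag-bit enumerations declared elsewhere in this header:

    // Signal after a copy, wait before a compute dispatch that reads the result.
    vk::MemoryBarrier2KHR barrier{};
    barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eCopy )
           .setSrcAccessMask( vk::AccessFlagBits2KHR::eTransferWrite )
           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eComputeShader )
           .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderRead );

    vk::DependencyInfoKHR dependencyInfo{};
    dependencyInfo.setMemoryBarriers( barrier );

    cmd.setEvent2KHR( event, dependencyInfo );
    // ... unrelated work ...
    cmd.waitEvents2KHR( event, dependencyInfo );  // one DependencyInfoKHR per event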
+ struct ImageResolve
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
+ return *this;
+ }
+
+ ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcSubresource = srcSubresource_;
+ return *this;
+ }
+
+ ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcOffset = srcOffset_;
+ return *this;
+ }
+
+ ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSubresource = dstSubresource_;
+ return *this;
+ }
+
+ ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstOffset = dstOffset_;
+ return *this;
+ }
+
+ ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extent = extent_;
+ return *this;
+ }
+
+
+ operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageResolve*>( this );
+ }
+
+ operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageResolve*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ImageResolve const& ) const = default;
+#else
+ bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( srcSubresource == rhs.srcSubresource )
+ && ( srcOffset == rhs.srcOffset )
+ && ( dstSubresource == rhs.dstSubresource )
+ && ( dstOffset == rhs.dstOffset )
+ && ( extent == rhs.extent );
+ }
+
+ bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+ };
+ static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+
+ struct ImageResolve2KHR
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
{}
- VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageCopy2KHR( *reinterpret_cast<ImageCopy2KHR const *>( &rhs ) )
+ ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
return *this;
}
- ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
- operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageCopy2KHR*>( this );
+ return *reinterpret_cast<const VkImageResolve2KHR*>( this );
}
- operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageCopy2KHR*>( this );
+ return *reinterpret_cast<VkImageResolve2KHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageCopy2KHR const& ) const = default;
+ auto operator<=>( ImageResolve2KHR const& ) const = default;
#else
- bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -26701,7 +29657,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( extent == rhs.extent );
}
- bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -26710,7 +29666,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
@@ -26719,90 +29675,90 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eImageCopy2KHR>
+ struct CppType<StructureType, StructureType::eImageResolve2KHR>
{
- using Type = ImageCopy2KHR;
+ using Type = ImageResolve2KHR;
};
- struct CopyImageInfo2KHR
+ struct ResolveImageInfo2KHR
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
{}
- VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyImageInfo2KHR( *reinterpret_cast<CopyImageInfo2KHR const *>( &rhs ) )
+ ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
+ ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
return *this;
}
- CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
- CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
- CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
- CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
- CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
- CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -26811,21 +29767,21 @@ namespace VULKAN_HPP_NAMESPACE
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCopyImageInfo2KHR*>( this );
+ return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
}
- operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCopyImageInfo2KHR*>( this );
+ return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CopyImageInfo2KHR const& ) const = default;
+ auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
#else
- bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -26837,7 +29793,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pRegions == rhs.pRegions );
}
- bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -26846,128 +29802,2125 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
- const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {};
+ const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
};
- static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
+ struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
{
- using Type = CopyImageInfo2KHR;
+ using Type = ResolveImageInfo2KHR;
};
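ImageResolve2KHR and ResolveImageInfo2KHR are the VK_KHR_copy_commands2 counterparts of ImageResolve, carrying their own sType/pNext so they can be extended through the pNext chain. A sketch of a multisample resolve through the new info struct, assuming a valid vk::CommandBuffer cmd, images msaaImage and resolveTarget already in transfer-src/dst layouts, uint32_t width and height, and the CommandBuffer::resolveImage2KHR wrapper declared elsewhere in this header:

    // One full-color region from the MSAA image into the single-sample target.
    vk::ImageResolve2KHR region{};
    region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
          .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
          .setExtent( { width, height, 1 } );

    vk::ResolveImageInfo2KHR resolveInfo{};
    resolveInfo.setSrcImage( msaaImage )
               .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
               .setDstImage( resolveTarget )
               .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
               .setRegions( region );

    cmd.resolveImage2KHR( resolveInfo );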
- struct CopyImageToBufferInfo2KHR
+ struct PerformanceMarkerInfoINTEL
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
{}
- VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyImageToBufferInfo2KHR( *reinterpret_cast<CopyImageToBufferInfo2KHR const *>( &rhs ) )
+ PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+ return *this;
+ }
+
+ PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+ {
+ marker = marker_;
+ return *this;
+ }
+
+
+ operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+ }
+
+ operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( marker == rhs.marker );
+ }
+
+ bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+ const void* pNext = {};
+ uint64_t marker = {};
+
+ };
+ static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+ {
+ using Type = PerformanceMarkerInfoINTEL;
+ };
+
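[editor note] Sketch of tagging the command stream with a 64-bit marker (VK_INTEL_performance_query), assuming the extension is enabled and the setPerformanceMarkerINTEL wrapper declared elsewhere in this header; cmd and frameIndex are illustrative.

    // Sketch: attach a marker to the command buffer. In the default
    // exception-enabled configuration the call throws on a non-success result.
    #include <vulkan/vulkan.hpp>

    void markFrame( vk::CommandBuffer cmd, uint64_t frameIndex )
    {
      vk::PerformanceMarkerInfoINTEL markerInfo{};
      markerInfo.setMarker( frameIndex );
      cmd.setPerformanceMarkerINTEL( markerInfo );
    }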
+ struct PerformanceOverrideInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
+ : type( type_ ), enable( enable_ ), parameter( parameter_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+ return *this;
+ }
+
+ PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ {
+ type = type_;
+ return *this;
+ }
+
+ PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ enable = enable_;
+ return *this;
+ }
+
+ PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+ {
+ parameter = parameter_;
+ return *this;
+ }
+
+
+ operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+ }
+
+ operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( type == rhs.type )
+ && ( enable == rhs.enable )
+ && ( parameter == rhs.parameter );
+ }
+
+ bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+ VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+ uint64_t parameter = {};
+
+ };
+ static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
+ {
+ using Type = PerformanceOverrideInfoINTEL;
+ };
+
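[editor note] Sketch of a performance override, under the same VK_INTEL_performance_query assumptions; the setPerformanceOverrideINTEL wrapper is the consuming call.

    // Sketch: force a "null hardware" override while profiling.
    #include <vulkan/vulkan.hpp>

    void enableNullHardware( vk::CommandBuffer cmd )
    {
      vk::PerformanceOverrideInfoINTEL overrideInfo{};
      overrideInfo.setType( vk::PerformanceOverrideTypeINTEL::eNullHardware )
                  .setEnable( VK_TRUE )
                  .setParameter( 0 );
      cmd.setPerformanceOverrideINTEL( overrideInfo );
    }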
+ struct PerformanceStreamMarkerInfoINTEL
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
+ return *this;
+ }
+
+ PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+ {
+ marker = marker_;
+ return *this;
+ }
+
+
+ operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+ }
+
+ operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
+#else
+ bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( marker == rhs.marker );
+ }
+
+ bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+ const void* pNext = {};
+ uint32_t marker = {};
+
+ };
+ static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+ {
+ using Type = PerformanceStreamMarkerInfoINTEL;
+ };
+
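[editor note] Sketch of the 32-bit stream-marker variant, same assumptions as the two INTEL structs above; consumed by setPerformanceStreamMarkerINTEL.

    // Sketch: write a stream marker into the command buffer.
    #include <vulkan/vulkan.hpp>

    void markStream( vk::CommandBuffer cmd, uint32_t marker )
    {
      cmd.setPerformanceStreamMarkerINTEL( vk::PerformanceStreamMarkerInfoINTEL{}.setMarker( marker ) );
    }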
+ struct Viewport
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
+ : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
+ return *this;
+ }
+
+ Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+ {
+ x = x_;
+ return *this;
+ }
+
+ Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+ {
+ y = y_;
+ return *this;
+ }
+
+ Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+ {
+ width = width_;
+ return *this;
+ }
+
+ Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ minDepth = minDepth_;
+ return *this;
+ }
+
+ Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDepth = maxDepth_;
+ return *this;
+ }
+
+
+ operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkViewport*>( this );
+ }
+
+ operator VkViewport &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkViewport*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( Viewport const& ) const = default;
+#else
+ bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( x == rhs.x )
+ && ( y == rhs.y )
+ && ( width == rhs.width )
+ && ( height == rhs.height )
+ && ( minDepth == rhs.minDepth )
+ && ( maxDepth == rhs.maxDepth );
+ }
+
+ bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ float x = {};
+ float y = {};
+ float width = {};
+ float height = {};
+ float minDepth = {};
+ float maxDepth = {};
+
+ };
+ static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+
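[editor note] Sketch of the Viewport wrapper with its chained setters, bound as dynamic state; cmd and the framebuffer extent are assumed.

    // Sketch: full-framebuffer viewport via setters, then bound with the
    // enhanced-mode ArrayProxy overload of setViewport (single element).
    #include <vulkan/vulkan.hpp>

    void setFullViewport( vk::CommandBuffer cmd, vk::Extent2D extent )
    {
      vk::Viewport viewport{};
      viewport.setX( 0.0f )
              .setY( 0.0f )
              .setWidth( static_cast<float>( extent.width ) )
              .setHeight( static_cast<float>( extent.height ) )
              .setMinDepth( 0.0f )
              .setMaxDepth( 1.0f );
      cmd.setViewport( 0, viewport );
    }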
+ struct ShadingRatePaletteNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
+ : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
- : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+ : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
return *this;
}
- CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+ return *this;
+ }
+
+ ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
+ pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+ }
+
+ operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+#else
+ bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+ && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+ }
+
+ bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t shadingRatePaletteEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+
+ };
+ static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+
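[editor note] Sketch of the enhanced-mode ArrayProxy constructor shown above, which derives the entry count and pointer from a container (VK_NV_shading_rate_image assumed enabled); cmd is illustrative.

    // Sketch: build a shading-rate palette from a std::array and bind it for viewport 0.
    #include <array>
    #include <vulkan/vulkan.hpp>

    void setCoarsePalette( vk::CommandBuffer cmd )
    {
      std::array<vk::ShadingRatePaletteEntryNV, 2> entries = {
        vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel,
        vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels
      };
      vk::ShadingRatePaletteNV palette( entries );   // count / pointer filled from the proxy
      cmd.setViewportShadingRatePaletteNV( 0, palette );
    }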
+ struct ViewportWScalingNV
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
+ : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
+ return *this;
+ }
+
+ ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+ {
+ xcoeff = xcoeff_;
+ return *this;
+ }
+
+ ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ycoeff = ycoeff_;
+ return *this;
+ }
+
+
+ operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+ }
+
+ operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkViewportWScalingNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ViewportWScalingNV const& ) const = default;
+#else
+ bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( xcoeff == rhs.xcoeff )
+ && ( ycoeff == rhs.ycoeff );
+ }
+
+ bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ float xcoeff = {};
+ float ycoeff = {};
+
+ };
+ static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+
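[editor note] Sketch of per-viewport W-scaling coefficients (VK_NV_clip_space_w_scaling assumed), consumed by setViewportWScalingNV; the coefficients are placeholders.

    // Sketch: set W-scaling for viewport 0.
    #include <vulkan/vulkan.hpp>

    void setWScaling( vk::CommandBuffer cmd, float xcoeff, float ycoeff )
    {
      vk::ViewportWScalingNV scaling( xcoeff, ycoeff );
      cmd.setViewportWScalingNV( 0, scaling );
    }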
+ struct StridedDeviceAddressRegionKHR
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+ : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
+ return *this;
+ }
+
+ StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceAddress = deviceAddress_;
+ return *this;
+ }
+
+ StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stride = stride_;
+ return *this;
+ }
+
+ StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+
+ operator VkStridedDeviceAddressRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
+ }
+
+ operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( StridedDeviceAddressRegionKHR const& ) const = default;
+#else
+ bool operator==( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( deviceAddress == rhs.deviceAddress )
+ && ( stride == rhs.stride )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+ };
+ static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+
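[editor note] Sketch of the four shader-binding-table regions passed to traceRaysKHR (VK_KHR_ray_tracing_pipeline). The base device address and stride are assumed to come from the application's SBT allocation and the pipeline properties; the layout below is purely illustrative.

    // Sketch: describe ray-gen / miss / hit regions and launch a trace.
    #include <vulkan/vulkan.hpp>

    void trace( vk::CommandBuffer cmd, vk::DeviceAddress sbtBase, vk::DeviceSize handleStride,
                uint32_t width, uint32_t height )
    {
      vk::StridedDeviceAddressRegionKHR raygen( sbtBase,                    handleStride, handleStride );
      vk::StridedDeviceAddressRegionKHR miss  ( sbtBase + handleStride,     handleStride, handleStride );
      vk::StridedDeviceAddressRegionKHR hit   ( sbtBase + 2 * handleStride, handleStride, handleStride );
      vk::StridedDeviceAddressRegionKHR callable{};   // no callable shaders in this sketch

      cmd.traceRaysKHR( raygen, miss, hit, callable, width, height, 1 );
    }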
+ class CommandBuffer
+ {
+ public:
+ using CType = VkCommandBuffer;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+ public:
+ VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+ : m_commandBuffer( commandBuffer )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+ {
+ m_commandBuffer = commandBuffer;
+ return *this;
+ }
+#endif
+
+ CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_commandBuffer = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBuffer const& ) const = default;
+#else
+ bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer == rhs.m_commandBuffer;
+ }
+
+ bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer != rhs.m_commandBuffer;
+ }
+
+ bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer < rhs.m_commandBuffer;
+ }
+#endif
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
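[editor note] Sketch of the trailing Dispatch parameter visible in these declarations: it defaults to VULKAN_HPP_DEFAULT_DISPATCHER, so most calls omit it, and an explicit dynamic dispatcher can be passed per call. The handles and the loader entry point are assumed to exist in the application.

    // Sketch: begin recording with the default dispatcher versus an explicit
    // vk::DispatchLoaderDynamic (useful when several loaders/devices coexist).
    #include <vulkan/vulkan.hpp>

    void beginRecording( vk::CommandBuffer cmdDefault, vk::CommandBuffer cmdDynamic,
                         vk::Instance instance, vk::Device device,
                         PFN_vkGetInstanceProcAddr getInstanceProcAddr )
    {
      // Default dispatcher (VULKAN_HPP_DEFAULT_DISPATCHER):
      cmdDefault.begin( vk::CommandBufferBeginInfo{ vk::CommandBufferUsageFlagBits::eOneTimeSubmit } );

      // Explicit per-call dispatcher:
      vk::DispatchLoaderDynamic dld( instance, getInstanceProcAddr, device );
      cmdDynamic.begin( vk::CommandBufferBeginInfo{}, dld );
    }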
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const DependencyInfoKHR & dependencyInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_commandBuffer == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkCommandBuffer m_commandBuffer;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
+ {
+ using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
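The CommandBuffer declarations above pick up the new VK_KHR_synchronization2 entry points introduced in this header bump (pipelineBarrier2KHR, setEvent2KHR, resetEvent2KHR, waitEvents2KHR, writeTimestamp2KHR, writeBufferMarker2AMD). A minimal sketch of the enhanced-mode barrier call, assuming the extension is enabled on the device, the dispatcher can resolve its entry points, and cmd is in the recording state; the all-commands/memory masks are deliberately conservative and purely illustrative:

  #include <vulkan/vulkan.hpp>

  // Hypothetical helper: records one image layout transition through the new
  // synchronization2 barrier declared above.
  void transitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
  {
    vk::ImageMemoryBarrier2KHR barrier;
    barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eAllCommands )
           .setSrcAccessMask( vk::AccessFlagBits2KHR::eMemoryWrite )
           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eAllCommands )
           .setDstAccessMask( vk::AccessFlagBits2KHR::eMemoryRead | vk::AccessFlagBits2KHR::eMemoryWrite )
           .setOldLayout( vk::ImageLayout::eUndefined )
           .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
           .setImage( image )
           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

    vk::DependencyInfoKHR dependencyInfo;
    dependencyInfo.setImageMemoryBarriers( barrier );   // enhanced-mode ArrayProxy setter

    cmd.pipelineBarrier2KHR( dependencyInfo );          // enhanced-mode overload declared above
  }
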
+ struct CommandBufferSubmitInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR(VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : commandBuffer( commandBuffer_ ), deviceMask( deviceMask_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandBufferSubmitInfoKHR( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CommandBufferSubmitInfoKHR( *reinterpret_cast<CommandBufferSubmitInfoKHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfoKHR & operator=( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandBufferSubmitInfoKHR & operator=( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ CommandBufferSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferSubmitInfoKHR & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
{
- srcImage = srcImage_;
+ commandBuffer = commandBuffer_;
return *this;
}
- CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferSubmitInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
- srcImageLayout = srcImageLayout_;
+ deviceMask = deviceMask_;
return *this;
}
- CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkCommandBufferSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
- dstBuffer = dstBuffer_;
+ return *reinterpret_cast<const VkCommandBufferSubmitInfoKHR*>( this );
+ }
+
+ operator VkCommandBufferSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCommandBufferSubmitInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandBufferSubmitInfoKHR const& ) const = default;
+#else
+ bool operator==( CommandBufferSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( commandBuffer == rhs.commandBuffer )
+ && ( deviceMask == rhs.deviceMask );
+ }
+
+ bool operator!=( CommandBufferSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
+ uint32_t deviceMask = {};
+
+ };
+ static_assert( sizeof( CommandBufferSubmitInfoKHR ) == sizeof( VkCommandBufferSubmitInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CommandBufferSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCommandBufferSubmitInfoKHR>
+ {
+ using Type = CommandBufferSubmitInfoKHR;
+ };
+
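A minimal sketch of how the new struct is typically filled and handed to a queue submission, assuming a fully recorded command buffer and a queue on a device with VK_KHR_synchronization2 enabled; SubmitInfo2KHR and Queue::submit2KHR belong to the same extension but are declared elsewhere in this header:

  #include <vulkan/vulkan.hpp>

  // Hypothetical helper: submits one command buffer through the synchronization2 path.
  void submitOne( vk::Queue queue, vk::CommandBuffer cmd )
  {
    vk::CommandBufferSubmitInfoKHR cmdInfo;
    cmdInfo.setCommandBuffer( cmd )
           .setDeviceMask( 0 );                     // 0 selects all devices in the group

    vk::SubmitInfo2KHR submitInfo;
    submitInfo.setCommandBufferInfos( cmdInfo );    // enhanced-mode ArrayProxy setter

    queue.submit2KHR( submitInfo );                 // throws vk::SystemError on failure
  }
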
+ struct CommandPoolCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
return *this;
}
- CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- regionCount = regionCount_;
+ pNext = pNext_;
return *this;
}
- CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- pRegions = pRegions_;
+ flags = flags_;
+ return *this;
+ }
+
+ CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndex = queueFamilyIndex_;
+ return *this;
+ }
+
+
+ operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+ }
+
+ operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+#else
+ bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( queueFamilyIndex == rhs.queueFamilyIndex );
+ }
+
+ bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+ uint32_t queueFamilyIndex = {};
+
+ };
+ static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+ {
+ using Type = CommandPoolCreateInfo;
+ };
+
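A minimal sketch of typical use, assuming a created vk::Device, a known queue family index, and the header defaults (exceptions and smart handles enabled):

  #include <vulkan/vulkan.hpp>

  // Hypothetical helper: creates a resettable command pool for the given queue family.
  vk::UniqueCommandPool makeCommandPool( vk::Device device, uint32_t queueFamilyIndex )
  {
    vk::CommandPoolCreateInfo poolInfo;
    poolInfo.setFlags( vk::CommandPoolCreateFlagBits::eResetCommandBuffer )
            .setQueueFamilyIndex( queueFamilyIndex );

    return device.createCommandPoolUnique( poolInfo );
  }
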
+ class ShaderModule
+ {
+ public:
+ using CType = VkShaderModule;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+ public:
+ VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_shaderModule(VK_NULL_HANDLE)
+ {}
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+ : m_shaderModule( shaderModule )
+ {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+ ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+ {
+ m_shaderModule = shaderModule;
+ return *this;
+ }
+#endif
+
+ ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_shaderModule = VK_NULL_HANDLE;
+ return *this;
+ }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ShaderModule const& ) const = default;
+#else
+ bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule == rhs.m_shaderModule;
+ }
+
+ bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule != rhs.m_shaderModule;
+ }
+
+ bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule < rhs.m_shaderModule;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_shaderModule == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkShaderModule m_shaderModule;
+ };
+ static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+ template <>
+ struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
+ {
+ using type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+ };
+
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
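A minimal sketch of obtaining a ShaderModule handle, assuming a vk::Device and SPIR-V words already loaded into memory; ShaderModuleCreateInfo and createShaderModuleUnique are declared elsewhere in this header:

  #include <vulkan/vulkan.hpp>
  #include <cstdint>
  #include <vector>

  // Hypothetical helper: wraps already-loaded SPIR-V in a shader module.
  vk::UniqueShaderModule makeShaderModule( vk::Device device, std::vector<uint32_t> const & spirv )
  {
    vk::ShaderModuleCreateInfo createInfo;
    createInfo.setCodeSize( spirv.size() * sizeof( uint32_t ) )   // size is in bytes
              .setPCode( spirv.data() );

    return device.createShaderModuleUnique( createInfo );
  }
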
+ struct SpecializationMapEntry
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
+ : constantID( constantID_ ), offset( offset_ ), size( size_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+ return *this;
+ }
+
+ SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ constantID = constantID_;
+ return *this;
+ }
+
+ SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+
+ operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+ }
+
+ operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SpecializationMapEntry const& ) const = default;
+#else
+ bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( constantID == rhs.constantID )
+ && ( offset == rhs.offset )
+ && ( size == rhs.size );
+ }
+
+ bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t constantID = {};
+ uint32_t offset = {};
+ size_t size = {};
+
+ };
+ static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+
+ struct SpecializationInfo
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+ : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+ : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
+ return *this;
+ }
+
+ SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mapEntryCount = mapEntryCount_;
+ return *this;
+ }
+
+ SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pMapEntries = pMapEntries_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
{
- regionCount = static_cast<uint32_t>( regions_.size() );
- pRegions = regions_.data();
+ mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
+ pMapEntries = mapEntries_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataSize = dataSize_;
+ return *this;
+ }
- operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( this );
+ pData = pData_;
+ return *this;
}
- operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ template <typename T>
+ SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCopyImageToBufferInfo2KHR*>( this );
+ dataSize = data_.size() * sizeof(T);
+ pData = data_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSpecializationInfo*>( this );
+ }
+
+ operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSpecializationInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CopyImageToBufferInfo2KHR const& ) const = default;
+ auto operator<=>( SpecializationInfo const& ) const = default;
#else
- bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( mapEntryCount == rhs.mapEntryCount )
+ && ( pMapEntries == rhs.pMapEntries )
+ && ( dataSize == rhs.dataSize )
+ && ( pData == rhs.pData );
+ }
+
+ bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint32_t mapEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
+ size_t dataSize = {};
+ const void* pData = {};
+
+ };
+ static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+
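A minimal sketch of filling the two structs above for a shader with two 32-bit specialization constants (IDs 0 and 1 are assumed); the map entries and the value data must outlive every pipeline creation that reads the SpecializationInfo, which is why the arrays below are static:

  #include <vulkan/vulkan.hpp>
  #include <array>
  #include <cstdint>

  // Hypothetical helper: builds a SpecializationInfo for two uint32_t constants.
  vk::SpecializationInfo makeSpecializationInfo()
  {
    static const std::array<vk::SpecializationMapEntry, 2> entries = {
      vk::SpecializationMapEntry( 0, 0, sizeof( uint32_t ) ),                   // constantID, offset, size
      vk::SpecializationMapEntry( 1, sizeof( uint32_t ), sizeof( uint32_t ) )
    };
    static const std::array<uint32_t, 2> values = { 64, 128 };

    vk::SpecializationInfo specializationInfo;
    specializationInfo.setMapEntries( entries )          // fills mapEntryCount / pMapEntries
                      .setDataSize( sizeof( values ) )   // byte size of the constant data
                      .setPData( values.data() );
    return specializationInfo;
  }
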
+ struct PipelineShaderStageCreateInfo
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stage = stage_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+ {
+ module = module_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pName = pName_;
+ return *this;
+ }
+
+ PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSpecializationInfo = pSpecializationInfo_;
+ return *this;
+ }
+
+
+ operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+ }
+
+ operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+#else
+ bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( srcImage == rhs.srcImage )
- && ( srcImageLayout == rhs.srcImageLayout )
- && ( dstBuffer == rhs.dstBuffer )
- && ( regionCount == rhs.regionCount )
- && ( pRegions == rhs.pRegions );
+ && ( flags == rhs.flags )
+ && ( stage == rhs.stage )
+ && ( module == rhs.module )
+ && ( pName == rhs.pName )
+ && ( pSpecializationInfo == rhs.pSpecializationInfo );
}
- bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -26976,210 +31929,550 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Image srcImage = {};
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
- uint32_t regionCount = {};
- const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+ VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+ const char* pName = {};
+ const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
};
- static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
+ struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
{
- using Type = CopyImageToBufferInfo2KHR;
+ using Type = PipelineShaderStageCreateInfo;
};
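Not part of the diff: a sketch of filling the PipelineShaderStageCreateInfo wrapper above for a compute stage; shaderModule is an assumed, previously created vk::ShaderModule and specInfo the optional vk::SpecializationInfo from the sketch further up.

    // Hypothetical example: describe a compute shader stage with entry point "main".
    vk::PipelineShaderStageCreateInfo stageInfo(
        {},                                   // flags
        vk::ShaderStageFlagBits::eCompute,    // stage
        shaderModule,                         // assumed: created via device.createShaderModule(...)
        "main",                               // pName
        &specInfo );                          // optional specialization constants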
- struct CopyMemoryToAccelerationStructureInfoKHR
+ struct ComputePipelineCreateInfo
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
- : src( src_ ), dst( dst_ ), mode( mode_ )
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
- CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
+ ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
return *this;
}
- CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
- src = src_;
+ flags = flags_;
return *this;
}
- CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
{
- dst = dst_;
+ stage = stage_;
return *this;
}
- CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
- mode = mode_;
+ layout = layout_;
return *this;
}
+ ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ basePipelineHandle = basePipelineHandle_;
+ return *this;
+ }
- operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ basePipelineIndex = basePipelineIndex_;
+ return *this;
}
- operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+
+ operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+ return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+ }
+
+ operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
+#else
+ bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( stage == rhs.stage )
+ && ( layout == rhs.layout )
+ && ( basePipelineHandle == rhs.basePipelineHandle )
+ && ( basePipelineIndex == rhs.basePipelineIndex );
}
+ bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
};
- static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+ struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
{
- using Type = CopyMemoryToAccelerationStructureInfoKHR;
+ using Type = ComputePipelineCreateInfo;
};
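Not part of the diff: a sketch that feeds the stage description into ComputePipelineCreateInfo and creates the pipeline; device and pipelineLayout are assumed handles, and the singular createComputePipeline returns a ResultValue because pipeline creation may report VK_PIPELINE_COMPILE_REQUIRED_EXT.

    // Hypothetical example: build a compute pipeline from the stage above.
    vk::ComputePipelineCreateInfo pipelineInfo( {}, stageInfo, pipelineLayout );
    auto created = device.createComputePipeline( nullptr /* no pipeline cache */, pipelineInfo );
    vk::Pipeline computePipeline = created.value;   // created.result holds the vk::Result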
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- struct D3D12FenceSubmitInfoKHR
+ struct ConformanceVersion
+ {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
+ : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+ return *this;
+ }
+
+ ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+ {
+ major = major_;
+ return *this;
+ }
+
+ ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ minor = minor_;
+ return *this;
+ }
+
+ ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subminor = subminor_;
+ return *this;
+ }
+
+ ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+ {
+ patch = patch_;
+ return *this;
+ }
+
+
+ operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkConformanceVersion*>( this );
+ }
+
+ operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkConformanceVersion*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( ConformanceVersion const& ) const = default;
+#else
+ bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( major == rhs.major )
+ && ( minor == rhs.minor )
+ && ( subminor == rhs.subminor )
+ && ( patch == rhs.patch );
+ }
+
+ bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ uint8_t major = {};
+ uint8_t minor = {};
+ uint8_t subminor = {};
+ uint8_t patch = {};
+
+ };
+ static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+ using ConformanceVersionKHR = ConformanceVersion;
+
+ struct CooperativeMatrixPropertiesNV
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
+ : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
{}
- VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
+ CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
{}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
- : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ MSize = MSize_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ NSize = NSize_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ KSize = KSize_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ AType = AType_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ BType = BType_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ CType = CType_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ DType = DType_;
+ return *this;
+ }
+
+ CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scope = scope_;
+ return *this;
+ }
+
+
+ operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+ }
+
+ operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+#else
+ bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( MSize == rhs.MSize )
+ && ( NSize == rhs.NSize )
+ && ( KSize == rhs.KSize )
+ && ( AType == rhs.AType )
+ && ( BType == rhs.BType )
+ && ( CType == rhs.CType )
+ && ( DType == rhs.DType )
+ && ( scope == rhs.scope );
+ }
+
+ bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+ void* pNext = {};
+ uint32_t MSize = {};
+ uint32_t NSize = {};
+ uint32_t KSize = {};
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+
+ };
+ static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
+ {
+ using Type = CooperativeMatrixPropertiesNV;
+ };
+
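Not part of the diff: CooperativeMatrixPropertiesNV is an output structure of VK_NV_cooperative_matrix; a sketch of enumerating it, assuming a physicalDevice handle and a dispatcher that has loaded vkGetPhysicalDeviceCooperativeMatrixPropertiesNV (e.g. a vk::DispatchLoaderDynamic instance).

    // Hypothetical example: list supported cooperative-matrix shapes (extension must be available).
    std::vector<vk::CooperativeMatrixPropertiesNV> shapes =
        physicalDevice.getCooperativeMatrixPropertiesNV( dispatcher );
    for ( auto const & p : shapes )
    {
      // p.MSize x p.NSize x p.KSize with component types p.AType / p.BType / p.CType / p.DType
    }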
+ struct CopyCommandTransformInfoQCOM
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
+ : transform( transform_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
{}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
return *this;
}
- D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ CopyCommandTransformInfoQCOM & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+ CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
- waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+ transform = transform_;
return *this;
}
- D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+
+ operator VkCopyCommandTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
{
- pWaitSemaphoreValues = pWaitSemaphoreValues_;
+ return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
+ }
+
+ operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( CopyCommandTransformInfoQCOM const& ) const = default;
+#else
+ bool operator==( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( transform == rhs.transform );
+ }
+
+ bool operator!=( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+
+ };
+ static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
+ {
+ using Type = CopyCommandTransformInfoQCOM;
+ };
+
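Not part of the diff: CopyCommandTransformInfoQCOM (VK_QCOM_rotated_copy_commands) is chained into the pNext of a copy region; a sketch of requesting a 90-degree rotated copy, with the remaining region fields assumed to be filled elsewhere.

    // Hypothetical example: rotate a buffer/image copy region by 90 degrees.
    vk::CopyCommandTransformInfoQCOM rotate( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
    vk::BufferImageCopy2KHR region;          // remaining members filled as usual
    region.setPNext( &rotate );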
+ struct CopyDescriptorSet
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+ : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
return *this;
}
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
- waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
- pWaitSemaphoreValues = waitSemaphoreValues_.data();
+ pNext = pNext_;
return *this;
}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
{
- signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+ srcSet = srcSet_;
return *this;
}
- D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
{
- pSignalSemaphoreValues = pSignalSemaphoreValues_;
+ srcBinding = srcBinding_;
return *this;
}
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
- signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
- pSignalSemaphoreValues = signalSemaphoreValues_.data();
+ srcArrayElement = srcArrayElement_;
+ return *this;
+ }
+
+ CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstSet = dstSet_;
return *this;
}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstBinding = dstBinding_;
+ return *this;
+ }
- operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+ dstArrayElement = dstArrayElement_;
+ return *this;
}
- operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+ descriptorCount = descriptorCount_;
+ return *this;
+ }
+
+
+ operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+ }
+
+ operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyDescriptorSet*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
+ auto operator<=>( CopyDescriptorSet const& ) const = default;
#else
- bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
- && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
- && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
- && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+ && ( srcSet == rhs.srcSet )
+ && ( srcBinding == rhs.srcBinding )
+ && ( srcArrayElement == rhs.srcArrayElement )
+ && ( dstSet == rhs.dstSet )
+ && ( dstBinding == rhs.dstBinding )
+ && ( dstArrayElement == rhs.dstArrayElement )
+ && ( descriptorCount == rhs.descriptorCount );
}
- bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -27188,91 +32481,132 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
const void* pNext = {};
- uint32_t waitSemaphoreValuesCount = {};
- const uint64_t* pWaitSemaphoreValues = {};
- uint32_t signalSemaphoreValuesCount = {};
- const uint64_t* pSignalSemaphoreValues = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+ uint32_t srcBinding = {};
+ uint32_t srcArrayElement = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
};
- static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
+ struct CppType<StructureType, StructureType::eCopyDescriptorSet>
{
- using Type = D3D12FenceSubmitInfoKHR;
+ using Type = CopyDescriptorSet;
};
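Not part of the diff: a sketch of using CopyDescriptorSet with vk::Device::updateDescriptorSets to copy one descriptor between two assumed, already-allocated sets.

    // Hypothetical example: copy binding 0 of srcSet into binding 0 of dstSet.
    vk::CopyDescriptorSet copy( srcSet, 0 /*srcBinding*/, 0 /*srcArrayElement*/,
                                dstSet, 0 /*dstBinding*/, 0 /*dstArrayElement*/,
                                1 /*descriptorCount*/ );
    device.updateDescriptorSets( nullptr, copy );   // no writes, one copy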
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct DebugMarkerMarkerInfoEXT
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ struct D3D12FenceSubmitInfoKHR
{
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
- : pMarkerName( pMarkerName_ ), color( color_ )
+ VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
{}
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
+ D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+ : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
{}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
return *this;
}
- DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
- pMarkerName = pMarkerName_;
+ waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
return *this;
}
- DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
- color = color_;
+ pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+ pWaitSemaphoreValues = waitSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+ signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+ return *this;
}
- operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+ pSignalSemaphoreValues = pSignalSemaphoreValues_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+ pSignalSemaphoreValues = signalSemaphoreValues_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+ }
+
+ operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
+ auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
#else
- bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( pMarkerName == rhs.pMarkerName )
- && ( color == rhs.color );
+ && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+ && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+ && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+ && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
}
- bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -27281,20 +32615,23 @@ namespace VULKAN_HPP_NAMESPACE
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
const void* pNext = {};
- const char* pMarkerName = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+ uint32_t waitSemaphoreValuesCount = {};
+ const uint64_t* pWaitSemaphoreValues = {};
+ uint32_t signalSemaphoreValuesCount = {};
+ const uint64_t* pSignalSemaphoreValues = {};
};
- static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
- struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+ struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
{
- using Type = DebugMarkerMarkerInfoEXT;
+ using Type = D3D12FenceSubmitInfoKHR;
};
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
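Not part of the diff: on Windows, D3D12FenceSubmitInfoKHR is chained into a queue submission's pNext when the semaphores were imported from D3D12 fences; a sketch with waitValue/signalValue as hypothetical fence values and submitInfo an assumed vk::SubmitInfo.

    // Hypothetical example (VK_USE_PLATFORM_WIN32_KHR only): attach D3D12 fence values to a submit.
    uint64_t waitValue = 1, signalValue = 2;
    vk::D3D12FenceSubmitInfoKHR d3d12Values( 1, &waitValue, 1, &signalValue );
    submitInfo.setPNext( &d3d12Values );     // submitInfo then passed to queue.submit(...)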
struct DebugMarkerObjectNameInfoEXT
{
@@ -27623,96 +32960,6 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DebugReportCallbackCreateInfoEXT;
};
- struct DebugUtilsLabelEXT
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
- : pLabelName( pLabelName_ ), color( color_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
- return *this;
- }
-
- DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
- {
- pLabelName = pLabelName_;
- return *this;
- }
-
- DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
- {
- color = color_;
- return *this;
- }
-
-
- operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
- }
-
- operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
-#else
- bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( pLabelName == rhs.pLabelName )
- && ( color == rhs.color );
- }
-
- bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
- const void* pNext = {};
- const char* pLabelName = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
-
- };
- static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
- {
- using Type = DebugUtilsLabelEXT;
- };
-
struct DebugUtilsObjectNameInfoEXT
{
static const bool allowDuplicate = false;
@@ -28497,368 +33744,6 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DedicatedAllocationMemoryAllocateInfoNV;
};
- struct DescriptorBufferInfo
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ ), offset( offset_ ), range( range_ )
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
- return *this;
- }
-
- DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
- {
- buffer = buffer_;
- return *this;
- }
-
- DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
- {
- offset = offset_;
- return *this;
- }
-
- DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
- {
- range = range_;
- return *this;
- }
-
-
- operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
- }
-
- operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DescriptorBufferInfo const& ) const = default;
-#else
- bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( buffer == rhs.buffer )
- && ( offset == rhs.offset )
- && ( range == rhs.range );
- }
-
- bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize range = {};
-
- };
- static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
-
- class Sampler
- {
- public:
- using CType = VkSampler;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
-
- public:
- VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
- : m_sampler(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_sampler(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
- : m_sampler( sampler )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
- {
- m_sampler = sampler;
- return *this;
- }
-#endif
-
- Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_sampler = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( Sampler const& ) const = default;
-#else
- bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler == rhs.m_sampler;
- }
-
- bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler != rhs.m_sampler;
- }
-
- bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler < rhs.m_sampler;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_sampler == VK_NULL_HANDLE;
- }
-
- private:
- VkSampler m_sampler;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
- {
- using type = VULKAN_HPP_NAMESPACE::Sampler;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
- {
- using Type = VULKAN_HPP_NAMESPACE::Sampler;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
- {
- using Type = VULKAN_HPP_NAMESPACE::Sampler;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
- class ImageView
- {
- public:
- using CType = VkImageView;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
-
- public:
- VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
- : m_imageView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_imageView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
- : m_imageView( imageView )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
- {
- m_imageView = imageView;
- return *this;
- }
-#endif
-
- ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_imageView = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageView const& ) const = default;
-#else
- bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView == rhs.m_imageView;
- }
-
- bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView != rhs.m_imageView;
- }
-
- bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView < rhs.m_imageView;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_imageView == VK_NULL_HANDLE;
- }
-
- private:
- VkImageView m_imageView;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
- {
- using type = VULKAN_HPP_NAMESPACE::ImageView;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
- {
- using Type = VULKAN_HPP_NAMESPACE::ImageView;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
- {
- using Type = VULKAN_HPP_NAMESPACE::ImageView;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
- struct DescriptorImageInfo
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
- : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
- return *this;
- }
-
- DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
- {
- sampler = sampler_;
- return *this;
- }
-
- DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
- {
- imageView = imageView_;
- return *this;
- }
-
- DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- imageLayout = imageLayout_;
- return *this;
- }
-
-
- operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
- }
-
- operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkDescriptorImageInfo*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DescriptorImageInfo const& ) const = default;
-#else
- bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sampler == rhs.sampler )
- && ( imageView == rhs.imageView )
- && ( imageLayout == rhs.imageLayout );
- }
-
- bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::Sampler sampler = {};
- VULKAN_HPP_NAMESPACE::ImageView imageView = {};
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-
- };
- static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
-
struct DescriptorPoolSize
{
@@ -31788,3593 +36673,6 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
- class QueryPool
- {
- public:
- using CType = VkQueryPool;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
-
- public:
- VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
- : m_queryPool(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_queryPool(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
- : m_queryPool( queryPool )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
- {
- m_queryPool = queryPool;
- return *this;
- }
-#endif
-
- QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_queryPool = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( QueryPool const& ) const = default;
-#else
- bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool == rhs.m_queryPool;
- }
-
- bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool != rhs.m_queryPool;
- }
-
- bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool < rhs.m_queryPool;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_queryPool == VK_NULL_HANDLE;
- }
-
- private:
- VkQueryPool m_queryPool;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
- {
- using type = VULKAN_HPP_NAMESPACE::QueryPool;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
- {
- using Type = VULKAN_HPP_NAMESPACE::QueryPool;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
- {
- using Type = VULKAN_HPP_NAMESPACE::QueryPool;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
- struct RenderPassBeginInfo
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
- {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
- : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
- {}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
- return *this;
- }
-
- RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
- {
- renderPass = renderPass_;
- return *this;
- }
-
- RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- framebuffer = framebuffer_;
- return *this;
- }
-
- RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
- {
- renderArea = renderArea_;
- return *this;
- }
-
- RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
- {
- clearValueCount = clearValueCount_;
- return *this;
- }
-
- RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
- {
- pClearValues = pClearValues_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
- {
- clearValueCount = static_cast<uint32_t>( clearValues_.size() );
- pClearValues = clearValues_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
- operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
- }
-
- operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( RenderPassBeginInfo const& ) const = default;
-#else
- bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( renderPass == rhs.renderPass )
- && ( framebuffer == rhs.framebuffer )
- && ( renderArea == rhs.renderArea )
- && ( clearValueCount == rhs.clearValueCount )
- && ( pClearValues == rhs.pClearValues );
- }
-
- bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
- VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
- uint32_t clearValueCount = {};
- const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
-
- };
- static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
- {
- using Type = RenderPassBeginInfo;
- };
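  // Illustrative sketch, assuming an existing renderPass, framebuffer and a vk::Extent2D extent:
  // the enhanced-mode setClearValues() fills clearValueCount and pClearValues in one call.
  std::array<vk::ClearValue, 2> clearValues{};
  clearValues[0].color        = vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

  vk::RenderPassBeginInfo renderPassBegin = vk::RenderPassBeginInfo()
                                              .setRenderPass( renderPass )
                                              .setFramebuffer( framebuffer )
                                              .setRenderArea( vk::Rect2D( { 0, 0 }, extent ) )
                                              .setClearValues( clearValues );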
-
- struct SubpassBeginInfo
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
- : contents( contents_ )
- {}
-
- VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
- return *this;
- }
-
- SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
- {
- contents = contents_;
- return *this;
- }
-
-
- operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
- }
-
- operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassBeginInfo*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( SubpassBeginInfo const& ) const = default;
-#else
- bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( contents == rhs.contents );
- }
-
- bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
-
- };
- static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eSubpassBeginInfo>
- {
- using Type = SubpassBeginInfo;
- };
- using SubpassBeginInfoKHR = SubpassBeginInfo;
-
- struct ImageBlit
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
- return *this;
- }
-
- ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubresource = srcSubresource_;
- return *this;
- }
-
- ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
- {
- srcOffsets = srcOffsets_;
- return *this;
- }
-
- ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubresource = dstSubresource_;
- return *this;
- }
-
- ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
- {
- dstOffsets = dstOffsets_;
- return *this;
- }
-
-
- operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageBlit*>( this );
- }
-
- operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageBlit*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageBlit const& ) const = default;
-#else
- bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( srcSubresource == rhs.srcSubresource )
- && ( srcOffsets == rhs.srcOffsets )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffsets == rhs.dstOffsets );
- }
-
- bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
-
- };
- static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
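  // Illustrative sketch, assuming int32_t srcWidth/srcHeight and dstWidth/dstHeight for the two mip
  // levels involved (e.g. mipmap generation): each offset pair marks the corners of a blit region.
  vk::ImageBlit blit = vk::ImageBlit()
                         .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
                         .setSrcOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcWidth, srcHeight, 1 ) } )
                         .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ) )
                         .setDstOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstWidth, dstHeight, 1 ) } );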
-
- struct ImageSubresourceRange
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
- : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
- return *this;
- }
-
- ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
- {
- aspectMask = aspectMask_;
- return *this;
- }
-
- ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
- {
- baseMipLevel = baseMipLevel_;
- return *this;
- }
-
- ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
- {
- levelCount = levelCount_;
- return *this;
- }
-
- ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
- {
- baseArrayLayer = baseArrayLayer_;
- return *this;
- }
-
- ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
- {
- layerCount = layerCount_;
- return *this;
- }
-
-
- operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageSubresourceRange*>( this );
- }
-
- operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageSubresourceRange*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageSubresourceRange const& ) const = default;
-#else
- bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( aspectMask == rhs.aspectMask )
- && ( baseMipLevel == rhs.baseMipLevel )
- && ( levelCount == rhs.levelCount )
- && ( baseArrayLayer == rhs.baseArrayLayer )
- && ( layerCount == rhs.layerCount );
- }
-
- bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
- uint32_t baseMipLevel = {};
- uint32_t levelCount = {};
- uint32_t baseArrayLayer = {};
- uint32_t layerCount = {};
-
- };
- static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
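  // Illustrative sketch: a range covering every mip level and array layer of a color image,
  // using the VK_REMAINING_* constants from vulkan_core.h.
  vk::ImageSubresourceRange fullRange( vk::ImageAspectFlagBits::eColor,
                                       0, VK_REMAINING_MIP_LEVELS,
                                       0, VK_REMAINING_ARRAY_LAYERS );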
-
- struct ImageCopy
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
- return *this;
- }
-
- ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubresource = srcSubresource_;
- return *this;
- }
-
- ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- srcOffset = srcOffset_;
- return *this;
- }
-
- ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubresource = dstSubresource_;
- return *this;
- }
-
- ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- dstOffset = dstOffset_;
- return *this;
- }
-
- ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
- {
- extent = extent_;
- return *this;
- }
-
-
- operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageCopy*>( this );
- }
-
- operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageCopy*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageCopy const& ) const = default;
-#else
- bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( srcSubresource == rhs.srcSubresource )
- && ( srcOffset == rhs.srcOffset )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffset == rhs.dstOffset )
- && ( extent == rhs.extent );
- }
-
- bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
-
- };
- static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
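  // Illustrative sketch, assuming uint32_t width/height: a whole-subresource copy of mip level 0.
  vk::ImageCopy copyRegion( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                            vk::Offset3D( 0, 0, 0 ),
                            vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                            vk::Offset3D( 0, 0, 0 ),
                            vk::Extent3D( width, height, 1 ) );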
-
- struct SubpassEndInfo
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
-
- {}
-
- VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
- return *this;
- }
-
- SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
-
- operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkSubpassEndInfo*>( this );
- }
-
- operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkSubpassEndInfo*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( SubpassEndInfo const& ) const = default;
-#else
- bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext );
- }
-
- bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
- const void* pNext = {};
-
- };
- static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eSubpassEndInfo>
- {
- using Type = SubpassEndInfo;
- };
- using SubpassEndInfoKHR = SubpassEndInfo;
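  // Illustrative sketch, assuming a vk::CommandBuffer cmd (enhanced mode) and a populated
  // vk::RenderPassBeginInfo renderPassBegin: SubpassBeginInfo / SubpassEndInfo are the extra
  // parameters that vkCmdBeginRenderPass2 / vkCmdEndRenderPass2 take over the original entry points.
  vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  cmd.beginRenderPass2( renderPassBegin, subpassBegin );
  // ... record draw commands ...
  cmd.endRenderPass2( vk::SubpassEndInfo() );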
-
- class IndirectCommandsLayoutNV
- {
- public:
- using CType = VkIndirectCommandsLayoutNV;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
-
- public:
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
- : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
- {
- m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
- return *this;
- }
-#endif
-
- IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
-#else
- bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
- }
-
- bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
- }
-
- bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
- }
-
- private:
- VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
- {
- using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
- {
- using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
- };
-
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
- struct IndirectCommandsStreamNV
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ ), offset( offset_ )
- {}
-
- VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
- return *this;
- }
-
- IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
- {
- buffer = buffer_;
- return *this;
- }
-
- IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
- {
- offset = offset_;
- return *this;
- }
-
-
- operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
- }
-
- operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
-#else
- bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( buffer == rhs.buffer )
- && ( offset == rhs.offset );
- }
-
- bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-
- };
- static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
-
- struct GeneratedCommandsInfoNV
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
- : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
- {}
-
- VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
- {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
- : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
- {}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
- {
- pipelineBindPoint = pipelineBindPoint_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
- {
- pipeline = pipeline_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- indirectCommandsLayout = indirectCommandsLayout_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
- {
- streamCount = streamCount_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
- {
- pStreams = pStreams_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
- {
- streamCount = static_cast<uint32_t>( streams_.size() );
- pStreams = streams_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
- GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesCount = sequencesCount_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- preprocessBuffer = preprocessBuffer_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- preprocessOffset = preprocessOffset_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
- {
- preprocessSize = preprocessSize_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesCountBuffer = sequencesCountBuffer_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesCountOffset = sequencesCountOffset_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesIndexBuffer = sequencesIndexBuffer_;
- return *this;
- }
-
- GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- sequencesIndexOffset = sequencesIndexOffset_;
- return *this;
- }
-
-
- operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
- }
-
- operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
-#else
- bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( pipelineBindPoint == rhs.pipelineBindPoint )
- && ( pipeline == rhs.pipeline )
- && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
- && ( streamCount == rhs.streamCount )
- && ( pStreams == rhs.pStreams )
- && ( sequencesCount == rhs.sequencesCount )
- && ( preprocessBuffer == rhs.preprocessBuffer )
- && ( preprocessOffset == rhs.preprocessOffset )
- && ( preprocessSize == rhs.preprocessSize )
- && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
- && ( sequencesCountOffset == rhs.sequencesCountOffset )
- && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
- && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
- }
-
- bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
- uint32_t streamCount = {};
- const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
- uint32_t sequencesCount = {};
- VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
- VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
- VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
-
- };
- static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
- {
- using Type = GeneratedCommandsInfoNV;
- };
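  // Illustrative sketch for VK_NV_device_generated_commands, assuming previously created pipeline,
  // indirectCommandsLayout, streamBuffer, preprocessBuffer and preprocessSize objects: the
  // enhanced-mode setStreams() derives streamCount and pStreams from the array.
  std::array<vk::IndirectCommandsStreamNV, 1> streams = { vk::IndirectCommandsStreamNV( streamBuffer, 0 ) };
  vk::GeneratedCommandsInfoNV generatedCommandsInfo = vk::GeneratedCommandsInfoNV()
                                                        .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
                                                        .setPipeline( pipeline )
                                                        .setIndirectCommandsLayout( indirectCommandsLayout )
                                                        .setStreams( streams )
                                                        .setSequencesCount( 1 )
                                                        .setPreprocessBuffer( preprocessBuffer )
                                                        .setPreprocessSize( preprocessSize );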
-
- struct MemoryBarrier
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
- {}
-
- VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
- return *this;
- }
-
- MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcAccessMask = srcAccessMask_;
- return *this;
- }
-
- MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstAccessMask = dstAccessMask_;
- return *this;
- }
-
-
- operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkMemoryBarrier*>( this );
- }
-
- operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkMemoryBarrier*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( MemoryBarrier const& ) const = default;
-#else
- bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask );
- }
-
- bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
-
- };
- static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eMemoryBarrier>
- {
- using Type = MemoryBarrier;
- };
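  // Illustrative sketch: a global barrier making transfer writes visible to subsequent shader reads.
  vk::MemoryBarrier memoryBarrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );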
-
- struct ImageMemoryBarrier
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
- return *this;
- }
-
- ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- srcAccessMask = srcAccessMask_;
- return *this;
- }
-
- ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
- {
- dstAccessMask = dstAccessMask_;
- return *this;
- }
-
- ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- oldLayout = oldLayout_;
- return *this;
- }
-
- ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- newLayout = newLayout_;
- return *this;
- }
-
- ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- srcQueueFamilyIndex = srcQueueFamilyIndex_;
- return *this;
- }
-
- ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- dstQueueFamilyIndex = dstQueueFamilyIndex_;
- return *this;
- }
-
- ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
- {
- image = image_;
- return *this;
- }
-
- ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
- {
- subresourceRange = subresourceRange_;
- return *this;
- }
-
-
- operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
- }
-
- operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageMemoryBarrier*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageMemoryBarrier const& ) const = default;
-#else
- bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( srcAccessMask == rhs.srcAccessMask )
- && ( dstAccessMask == rhs.dstAccessMask )
- && ( oldLayout == rhs.oldLayout )
- && ( newLayout == rhs.newLayout )
- && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
- && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
- && ( image == rhs.image )
- && ( subresourceRange == rhs.subresourceRange );
- }
-
- bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
- VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- uint32_t srcQueueFamilyIndex = {};
- uint32_t dstQueueFamilyIndex = {};
- VULKAN_HPP_NAMESPACE::Image image = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
-
- };
- static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eImageMemoryBarrier>
- {
- using Type = ImageMemoryBarrier;
- };
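  // Illustrative sketch, assuming a vk::CommandBuffer cmd and a vk::Image image: a layout transition
  // from eTransferDstOptimal to eShaderReadOnlyOptimal, recorded via the enhanced-mode pipelineBarrier.
  vk::ImageMemoryBarrier imageBarrier = vk::ImageMemoryBarrier()
                                          .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
                                          .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
                                          .setOldLayout( vk::ImageLayout::eTransferDstOptimal )
                                          .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
                                          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                          .setImage( image )
                                          .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
                       {}, nullptr, nullptr, imageBarrier );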
-
- class BufferView
- {
- public:
- using CType = VkBufferView;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
-
- public:
- VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
- : m_bufferView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_bufferView(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
- : m_bufferView( bufferView )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
- {
- m_bufferView = bufferView;
- return *this;
- }
-#endif
-
- BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_bufferView = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( BufferView const& ) const = default;
-#else
- bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView == rhs.m_bufferView;
- }
-
- bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView != rhs.m_bufferView;
- }
-
- bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView < rhs.m_bufferView;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_bufferView == VK_NULL_HANDLE;
- }
-
- private:
- VkBufferView m_bufferView;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
- {
- using type = VULKAN_HPP_NAMESPACE::BufferView;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
- {
- using Type = VULKAN_HPP_NAMESPACE::BufferView;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
- {
- using Type = VULKAN_HPP_NAMESPACE::BufferView;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
- struct WriteDescriptorSet
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
- : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
- {}
-
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
- {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
- : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
- {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
-#else
- if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
- {
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
- }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
- return *this;
- }
-
- WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSet = dstSet_;
- return *this;
- }
-
- WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
- {
- dstBinding = dstBinding_;
- return *this;
- }
-
- WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
- {
- dstArrayElement = dstArrayElement_;
- return *this;
- }
-
- WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorCount = descriptorCount_;
- return *this;
- }
-
- WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorType = descriptorType_;
- return *this;
- }
-
- WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- pImageInfo = pImageInfo_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
- pImageInfo = imageInfo_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
- WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- pBufferInfo = pBufferInfo_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
- pBufferInfo = bufferInfo_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
- WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
- {
- pTexelBufferView = pTexelBufferView_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
- {
- descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
- pTexelBufferView = texelBufferView_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
- operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
- }
-
- operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkWriteDescriptorSet*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( WriteDescriptorSet const& ) const = default;
-#else
- bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( dstSet == rhs.dstSet )
- && ( dstBinding == rhs.dstBinding )
- && ( dstArrayElement == rhs.dstArrayElement )
- && ( descriptorCount == rhs.descriptorCount )
- && ( descriptorType == rhs.descriptorType )
- && ( pImageInfo == rhs.pImageInfo )
- && ( pBufferInfo == rhs.pBufferInfo )
- && ( pTexelBufferView == rhs.pTexelBufferView );
- }
-
- bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
- uint32_t dstBinding = {};
- uint32_t dstArrayElement = {};
- uint32_t descriptorCount = {};
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
- const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
- const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
- const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
-
- };
- static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eWriteDescriptorSet>
- {
- using Type = WriteDescriptorSet;
- };
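  // Illustrative sketch, assuming a vk::Device device, a vk::DescriptorSet descriptorSet and a
  // vk::Buffer uniformBuffer: the enhanced-mode setBufferInfo() derives descriptorCount from the
  // proxy, matching the single-source rule enforced by the checked constructor above.
  vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
                                   .setDstSet( descriptorSet )
                                   .setDstBinding( 0 )
                                   .setDstArrayElement( 0 )
                                   .setDescriptorType( vk::DescriptorType::eUniformBuffer )
                                   .setBufferInfo( bufferInfo );
  device.updateDescriptorSets( write, nullptr );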
-
- class DescriptorUpdateTemplate
- {
- public:
- using CType = VkDescriptorUpdateTemplate;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
-
- public:
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
- : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
- {
- m_descriptorUpdateTemplate = descriptorUpdateTemplate;
- return *this;
- }
-#endif
-
- DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_descriptorUpdateTemplate = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
-#else
- bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
- }
-
- bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
- }
-
- bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
- }
-
- private:
- VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
- {
- using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
- {
- using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
- {
- using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
- using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
-
- class Event
- {
- public:
- using CType = VkEvent;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
-
- public:
- VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
- : m_event(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_event(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
- : m_event( event )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
- {
- m_event = event;
- return *this;
- }
-#endif
-
- Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_event = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( Event const& ) const = default;
-#else
- bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_event == rhs.m_event;
- }
-
- bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_event != rhs.m_event;
- }
-
- bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_event < rhs.m_event;
- }
-#endif
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
- {
- return m_event;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_event != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_event == VK_NULL_HANDLE;
- }
-
- private:
- VkEvent m_event;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
- {
- using type = VULKAN_HPP_NAMESPACE::Event;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
- {
- using Type = VULKAN_HPP_NAMESPACE::Event;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
- {
- using Type = VULKAN_HPP_NAMESPACE::Event;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
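[Editorial example — not part of the generated header or of this diff.] The Event class removed above follows the same handle-wrapper pattern as every other handle in vulkan.hpp: it is bit-compatible with the raw VkEvent (see the static_assert), default/nullptr construction yields VK_NULL_HANDLE, explicit operator bool() tests for null, and an explicit conversion recovers the C handle. A minimal usage sketch, assuming a valid vk::Device named `device`, exceptions enabled, and the default dispatcher; names are illustrative only:

    vk::Event event = device.createEvent( vk::EventCreateInfo() );   // enhanced-mode overload; throws on failure
    if ( event )                                                      // operator bool(): handle != VK_NULL_HANDLE
    {
      VkEvent raw = static_cast<VkEvent>( event );                    // explicit conversion recovers the C handle
      static_cast<void>( raw );                                       // shown only to illustrate the conversion
      device.destroyEvent( event );
    }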
- struct ImageResolve
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
- return *this;
- }
-
- ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubresource = srcSubresource_;
- return *this;
- }
-
- ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- srcOffset = srcOffset_;
- return *this;
- }
-
- ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubresource = dstSubresource_;
- return *this;
- }
-
- ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- dstOffset = dstOffset_;
- return *this;
- }
-
- ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
- {
- extent = extent_;
- return *this;
- }
-
-
- operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageResolve*>( this );
- }
-
- operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageResolve*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageResolve const& ) const = default;
-#else
- bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( srcSubresource == rhs.srcSubresource )
- && ( srcOffset == rhs.srcOffset )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffset == rhs.dstOffset )
- && ( extent == rhs.extent );
- }
-
- bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
-
- };
- static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
-
- struct ImageResolve2KHR
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
- return *this;
- }
-
- ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- srcSubresource = srcSubresource_;
- return *this;
- }
-
- ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- srcOffset = srcOffset_;
- return *this;
- }
-
- ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
- {
- dstSubresource = dstSubresource_;
- return *this;
- }
-
- ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- dstOffset = dstOffset_;
- return *this;
- }
-
- ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
- {
- extent = extent_;
- return *this;
- }
-
-
- operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkImageResolve2KHR*>( this );
- }
-
- operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkImageResolve2KHR*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ImageResolve2KHR const& ) const = default;
-#else
- bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( srcSubresource == rhs.srcSubresource )
- && ( srcOffset == rhs.srcOffset )
- && ( dstSubresource == rhs.dstSubresource )
- && ( dstOffset == rhs.dstOffset )
- && ( extent == rhs.extent );
- }
-
- bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
- VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
- VULKAN_HPP_NAMESPACE::Extent3D extent = {};
-
- };
- static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eImageResolve2KHR>
- {
- using Type = ImageResolve2KHR;
- };
-
- struct ResolveImageInfo2KHR
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
- {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
- : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
- {}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
- return *this;
- }
-
- ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
- {
- srcImage = srcImage_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- srcImageLayout = srcImageLayout_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
- {
- dstImage = dstImage_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
- {
- dstImageLayout = dstImageLayout_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
- {
- regionCount = regionCount_;
- return *this;
- }
-
- ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
- {
- pRegions = pRegions_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
- {
- regionCount = static_cast<uint32_t>( regions_.size() );
- pRegions = regions_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
- operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
- }
-
- operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
-#else
- bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( srcImage == rhs.srcImage )
- && ( srcImageLayout == rhs.srcImageLayout )
- && ( dstImage == rhs.dstImage )
- && ( dstImageLayout == rhs.dstImageLayout )
- && ( regionCount == rhs.regionCount )
- && ( pRegions == rhs.pRegions );
- }
-
- bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::Image srcImage = {};
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- VULKAN_HPP_NAMESPACE::Image dstImage = {};
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
- uint32_t regionCount = {};
- const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
-
- };
- static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
- {
- using Type = ResolveImageInfo2KHR;
- };
-
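[Editorial example — not part of the generated header or of this diff.] The ImageResolve2KHR / ResolveImageInfo2KHR pair removed above follows the usual vulkan.hpp pattern for sType-carrying structs: sType and pNext are pre-initialized, the chained set*() members fill the remaining fields, and the ArrayProxy constructor sets regionCount and pRegions from a range in one step. A minimal sketch, assuming struct constructors and enhanced mode are enabled, `cmd` is a vk::CommandBuffer in the recording state, `srcImage`/`dstImage` are in the layouts named below, `width`/`height` are caller-provided uint32_t values, and the VK_KHR_copy_commands2 entry points have been loaded into the dispatcher:

    vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );   // mip 0, one layer
    vk::ImageResolve2KHR region;
    region.setSrcSubresource( layers )
          .setDstSubresource( layers )
          .setExtent( vk::Extent3D( width, height, 1 ) );

    vk::ResolveImageInfo2KHR resolveInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
                                          dstImage, vk::ImageLayout::eTransferDstOptimal,
                                          region );                   // ArrayProxy ctor fills regionCount/pRegions

    cmd.resolveImage2KHR( resolveInfo );                              // VK_KHR_copy_commands2 resolve command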
- struct PerformanceMarkerInfoINTEL
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
- {}
-
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
- return *this;
- }
-
- PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
- {
- marker = marker_;
- return *this;
- }
-
-
- operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
- }
-
- operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
-#else
- bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( marker == rhs.marker );
- }
-
- bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
- const void* pNext = {};
- uint64_t marker = {};
-
- };
- static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
- {
- using Type = PerformanceMarkerInfoINTEL;
- };
-
- struct PerformanceOverrideInfoINTEL
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
- : type( type_ ), enable( enable_ ), parameter( parameter_ )
- {}
-
- VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
- return *this;
- }
-
- PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
- {
- type = type_;
- return *this;
- }
-
- PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
- {
- enable = enable_;
- return *this;
- }
-
- PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
- {
- parameter = parameter_;
- return *this;
- }
-
-
- operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
- }
-
- operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
-#else
- bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( type == rhs.type )
- && ( enable == rhs.enable )
- && ( parameter == rhs.parameter );
- }
-
- bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
- const void* pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
- VULKAN_HPP_NAMESPACE::Bool32 enable = {};
- uint64_t parameter = {};
-
- };
- static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
- {
- using Type = PerformanceOverrideInfoINTEL;
- };
-
- struct PerformanceStreamMarkerInfoINTEL
- {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
- {}
-
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
- return *this;
- }
-
- PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
- {
- marker = marker_;
- return *this;
- }
-
-
- operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
- }
-
- operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
-#else
- bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( sType == rhs.sType )
- && ( pNext == rhs.pNext )
- && ( marker == rhs.marker );
- }
-
- bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
- const void* pNext = {};
- uint32_t marker = {};
-
- };
- static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
-
- template <>
- struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
- {
- using Type = PerformanceStreamMarkerInfoINTEL;
- };
-
- struct Viewport
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
- : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
- {}
-
- VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
- : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
- return *this;
- }
-
- Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
- {
- x = x_;
- return *this;
- }
-
- Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
- {
- y = y_;
- return *this;
- }
-
- Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
- {
- width = width_;
- return *this;
- }
-
- Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
- {
- height = height_;
- return *this;
- }
-
- Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
- {
- minDepth = minDepth_;
- return *this;
- }
-
- Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
- {
- maxDepth = maxDepth_;
- return *this;
- }
-
-
- operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkViewport*>( this );
- }
-
- operator VkViewport &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkViewport*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( Viewport const& ) const = default;
-#else
- bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( x == rhs.x )
- && ( y == rhs.y )
- && ( width == rhs.width )
- && ( height == rhs.height )
- && ( minDepth == rhs.minDepth )
- && ( maxDepth == rhs.maxDepth );
- }
-
- bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- float x = {};
- float y = {};
- float width = {};
- float height = {};
- float minDepth = {};
- float maxDepth = {};
-
- };
- static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
-
- struct ShadingRatePaletteNV
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
- : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
- {}
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
- : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
- {}
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
- return *this;
- }
-
- ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
- return *this;
- }
-
- ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
- {
- pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
- return *this;
- }
-
-#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
- ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
- {
- shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
- pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
- return *this;
- }
-#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
-
-
- operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
- }
-
- operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ShadingRatePaletteNV const& ) const = default;
-#else
- bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
- && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
- }
-
- bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- uint32_t shadingRatePaletteEntryCount = {};
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
-
- };
- static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
-
- struct ViewportWScalingNV
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
- : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
- {}
-
- VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
- return *this;
- }
-
- ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
- {
- xcoeff = xcoeff_;
- return *this;
- }
-
- ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
- {
- ycoeff = ycoeff_;
- return *this;
- }
-
-
- operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkViewportWScalingNV*>( this );
- }
-
- operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkViewportWScalingNV*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( ViewportWScalingNV const& ) const = default;
-#else
- bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( xcoeff == rhs.xcoeff )
- && ( ycoeff == rhs.ycoeff );
- }
-
- bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- float xcoeff = {};
- float ycoeff = {};
-
- };
- static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
-
- struct StridedDeviceAddressRegionKHR
- {
-
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
- : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
- {}
-
- VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
- {}
-#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-
- VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
- return *this;
- }
-
- StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
- {
- deviceAddress = deviceAddress_;
- return *this;
- }
-
- StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
- {
- stride = stride_;
- return *this;
- }
-
- StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
- {
- size = size_;
- return *this;
- }
-
-
- operator VkStridedDeviceAddressRegionKHR const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
- }
-
- operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
- }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( StridedDeviceAddressRegionKHR const& ) const = default;
-#else
- bool operator==( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return ( deviceAddress == rhs.deviceAddress )
- && ( stride == rhs.stride )
- && ( size == rhs.size );
- }
-
- bool operator!=( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-#endif
-
-
-
- public:
- VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
- VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
-
- };
- static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
-
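[Editorial example — not part of the generated header or of this diff.] Unlike the sType-carrying structs, Viewport, ShadingRatePaletteNV, ViewportWScalingNV and StridedDeviceAddressRegionKHR removed above are plain mirrors of their C counterparts, and the accompanying static_asserts guarantee they are the same size and standard layout. A minimal sketch of the count/pointer ArrayProxy setter and the zero-copy conversion back to the C struct, assuming enhanced mode is enabled and <vector> is included:

    std::vector<vk::ShadingRatePaletteEntryNV> entries{ vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel };
    vk::ShadingRatePaletteNV palette;
    palette.setShadingRatePaletteEntries( entries );                  // sets the entry count and pointer together

    const VkShadingRatePaletteNV & native = palette;                  // implicit conversion to the C struct, no copy
    static_cast<void>( native );                                      // shown only to illustrate the conversion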
- class CommandBuffer
- {
- public:
- using CType = VkCommandBuffer;
-
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
-
- public:
- VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
- : m_commandBuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_commandBuffer(VK_NULL_HANDLE)
- {}
-
- VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
- : m_commandBuffer( commandBuffer )
- {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
- CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
- {
- m_commandBuffer = commandBuffer;
- return *this;
- }
-#endif
-
- CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- {
- m_commandBuffer = VK_NULL_HANDLE;
- return *this;
- }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
- auto operator<=>( CommandBuffer const& ) const = default;
-#else
- bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer == rhs.m_commandBuffer;
- }
-
- bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer != rhs.m_commandBuffer;
- }
-
- bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer < rhs.m_commandBuffer;
- }
-#endif
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
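The ArrayProxy overload of pipelineBarrier declared just above derives the three barrier counts from the proxies, so a single barrier or any contiguous container can be passed directly. A minimal sketch for illustration only (not part of this diff), assuming a command buffer in the recording state and an image that was just written by a transfer:

  #include <vulkan/vulkan.hpp>

  // Assumed handles: a command buffer in the recording state and the image being transitioned.
  void transitionForSampling( vk::CommandBuffer cmd, vk::Image image )
  {
    vk::ImageMemoryBarrier barrier{ vk::AccessFlagBits::eTransferWrite,
                                    vk::AccessFlagBits::eShaderRead,
                                    vk::ImageLayout::eTransferDstOptimal,
                                    vk::ImageLayout::eShaderReadOnlyOptimal,
                                    VK_QUEUE_FAMILY_IGNORED,
                                    VK_QUEUE_FAMILY_IGNORED,
                                    image,
                                    { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } };

    // The single barrier converts to ArrayProxy<const vk::ImageMemoryBarrier>;
    // the imageMemoryBarrierCount is derived from the proxy, nullptr means "no barriers of this kind".
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {}, nullptr, nullptr, barrier );
  }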
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer == VK_NULL_HANDLE;
- }
-
- private:
- VkCommandBuffer m_commandBuffer;
- };
- static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
- {
- using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
- };
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
- {
- using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
- };
-
-
- template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
- {
- using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
- };
-
-
- template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
- {
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
- };
-
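The block above (relocated by this change) rounds out the vk::CommandBuffer handle wrapper with an explicit conversion back to the C handle and boolean validity checks. A minimal sketch of that interop, for illustration only and not part of this diff, assuming a command buffer in the recording state:

  #include <vulkan/vulkan.hpp>

  // Assumed handle: a command buffer in the recording state.
  bool endWithCApi( vk::CommandBuffer cmd )
  {
    if ( !cmd )                                                  // operator!() — true when the wrapped handle is VK_NULL_HANDLE
      return false;
    VkCommandBuffer raw = static_cast<VkCommandBuffer>( cmd );   // explicit conversion back to the C handle
    return vkEndCommandBuffer( raw ) == VK_SUCCESS;              // the raw handle works with the plain C API
  }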
struct MemoryAllocateInfo
{
static const bool allowDuplicate = false;
@@ -42930,6 +44228,275 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SubmitInfo;
};
+ struct SemaphoreSubmitInfoKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ = {}, uint32_t deviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ ), value( value_ ), stageMask( stageMask_ ), deviceIndex( deviceIndex_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreSubmitInfoKHR( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SemaphoreSubmitInfoKHR( *reinterpret_cast<SemaphoreSubmitInfoKHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfoKHR & operator=( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SemaphoreSubmitInfoKHR & operator=( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+ SemaphoreSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SemaphoreSubmitInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ SemaphoreSubmitInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+ {
+ value = value_;
+ return *this;
+ }
+
+ SemaphoreSubmitInfoKHR & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ stageMask = stageMask_;
+ return *this;
+ }
+
+ SemaphoreSubmitInfoKHR & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceIndex = deviceIndex_;
+ return *this;
+ }
+
+
+ operator VkSemaphoreSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSemaphoreSubmitInfoKHR*>( this );
+ }
+
+ operator VkSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSemaphoreSubmitInfoKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SemaphoreSubmitInfoKHR const& ) const = default;
+#else
+ bool operator==( SemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( semaphore == rhs.semaphore )
+ && ( value == rhs.value )
+ && ( stageMask == rhs.stageMask )
+ && ( deviceIndex == rhs.deviceIndex );
+ }
+
+ bool operator!=( SemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ uint64_t value = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask = {};
+ uint32_t deviceIndex = {};
+
+ };
+ static_assert( sizeof( SemaphoreSubmitInfoKHR ) == sizeof( VkSemaphoreSubmitInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSemaphoreSubmitInfoKHR>
+ {
+ using Type = SemaphoreSubmitInfoKHR;
+ };
+
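SemaphoreSubmitInfoKHR mirrors VkSemaphoreSubmitInfoKHR from VK_KHR_synchronization2; each setter assigns one member and returns *this, so the fields can be chained. A minimal sketch for illustration only (not part of this diff), assuming a binary semaphore created elsewhere and the PipelineStageFlagBits2KHR enumerants generated elsewhere in this header:

  #include <vulkan/vulkan.hpp>

  // Assumed handle: a binary semaphore created elsewhere.
  vk::SemaphoreSubmitInfoKHR makeWaitInfo( vk::Semaphore sem )
  {
    return vk::SemaphoreSubmitInfoKHR{}
        .setSemaphore( sem )                                                   // semaphore to wait on
        .setValue( 0 )                                                         // ignored for binary semaphores
        .setStageMask( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput ) // stages that perform the wait
        .setDeviceIndex( 0 );                                                  // single-device submission
  }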
+ struct SubmitInfo2KHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubmitInfo2KHR(VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos_ = {}) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ ), waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ), pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ), commandBufferInfoCount( commandBufferInfoCount_ ), pCommandBufferInfos( pCommandBufferInfos_ ), signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ), pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubmitInfo2KHR( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubmitInfo2KHR( *reinterpret_cast<SubmitInfo2KHR const *>( &rhs ) )
+ {}
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo2KHR( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const & commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & signalSemaphoreInfos_ = {} )
+ : flags( flags_ ), waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) ), pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ), commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) ), pCommandBufferInfos( commandBufferInfos_.data() ), signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) ), pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
+ {}
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2KHR & operator=( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubmitInfo2KHR & operator=( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2KHR const *>( &rhs );
+ return *this;
+ }
+
+ SubmitInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ SubmitInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ SubmitInfo2KHR & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
+ return *this;
+ }
+
+ SubmitInfo2KHR & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo2KHR & setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
+ pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubmitInfo2KHR & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ commandBufferInfoCount = commandBufferInfoCount_;
+ return *this;
+ }
+
+ SubmitInfo2KHR & setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCommandBufferInfos = pCommandBufferInfos_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo2KHR & setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
+ pCommandBufferInfos = commandBufferInfos_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+ SubmitInfo2KHR & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
+ return *this;
+ }
+
+ SubmitInfo2KHR & setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
+ return *this;
+ }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+ SubmitInfo2KHR & setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
+ pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
+ return *this;
+ }
+#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+ operator VkSubmitInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubmitInfo2KHR*>( this );
+ }
+
+ operator VkSubmitInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubmitInfo2KHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( SubmitInfo2KHR const& ) const = default;
+#else
+ bool operator==( SubmitInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount )
+ && ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos )
+ && ( commandBufferInfoCount == rhs.commandBufferInfoCount )
+ && ( pCommandBufferInfos == rhs.pCommandBufferInfos )
+ && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount )
+ && ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
+ }
+
+ bool operator!=( SubmitInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags = {};
+ uint32_t waitSemaphoreInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pWaitSemaphoreInfos = {};
+ uint32_t commandBufferInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR* pCommandBufferInfos = {};
+ uint32_t signalSemaphoreInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR* pSignalSemaphoreInfos = {};
+
+ };
+ static_assert( sizeof( SubmitInfo2KHR ) == sizeof( VkSubmitInfo2KHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubmitInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eSubmitInfo2KHR>
+ {
+ using Type = SubmitInfo2KHR;
+ };
+
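  // Editor's note: a minimal usage sketch for the SubmitInfo2KHR wrapper above, not part of this diff.
  // It assumes the default `vk` namespace alias, a dispatcher with the VK_KHR_synchronization2 entry
  // points loaded (e.g. the dynamic dispatcher), and that all handles passed in are valid; every name
  // here is illustrative.
  inline void submitWithSync2( vk::Queue queue, vk::CommandBuffer commandBuffer,
                               vk::Semaphore acquireSemaphore, vk::Semaphore renderDoneSemaphore,
                               vk::Fence fence )
  {
    // SemaphoreSubmitInfoKHR bundles each semaphore with the stage mask that used to live in pWaitDstStageMask.
    vk::SemaphoreSubmitInfoKHR     waitInfo( acquireSemaphore, 0, vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput );
    vk::SemaphoreSubmitInfoKHR     signalInfo( renderDoneSemaphore, 0, vk::PipelineStageFlagBits2KHR::eAllCommands );
    vk::CommandBufferSubmitInfoKHR cmdInfo( commandBuffer );

    // The enhanced-mode setters take ArrayProxyNoTemporaries, so the infos above must be named lvalues.
    vk::SubmitInfo2KHR submitInfo;
    submitInfo.setWaitSemaphoreInfos( waitInfo )
              .setCommandBufferInfos( cmdInfo )
              .setSignalSemaphoreInfos( signalInfo );

    queue.submit2KHR( submitInfo, fence );  // throws a vk::SystemError on failure when exceptions are enabled
  }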
class Queue
{
public:
@@ -42986,6 +44553,16 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getCheckpointData2NV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV* pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CheckpointData2NVAllocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -43049,6 +44626,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
@@ -46989,14 +48574,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -47104,14 +48689,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -47208,14 +48793,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -47228,14 +48813,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -47342,14 +48927,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -47936,7 +49521,7 @@ namespace VULKAN_HPP_NAMESPACE
void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR* pSizeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -69809,6 +71394,88 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
};
+ struct PhysicalDeviceSynchronization2FeaturesKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}) VULKAN_HPP_NOEXCEPT
+ : synchronization2( synchronization2_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSynchronization2FeaturesKHR( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSynchronization2FeaturesKHR( *reinterpret_cast<PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2FeaturesKHR & operator=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSynchronization2FeaturesKHR & operator=( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs );
+ return *this;
+ }
+
+ PhysicalDeviceSynchronization2FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceSynchronization2FeaturesKHR & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+ {
+ synchronization2 = synchronization2_;
+ return *this;
+ }
+
+
+ operator VkPhysicalDeviceSynchronization2FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSynchronization2FeaturesKHR*>( this );
+ }
+
+ operator VkPhysicalDeviceSynchronization2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSynchronization2FeaturesKHR*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( PhysicalDeviceSynchronization2FeaturesKHR const& ) const = default;
+#else
+ bool operator==( PhysicalDeviceSynchronization2FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( synchronization2 == rhs.synchronization2 );
+ }
+
+ bool operator!=( PhysicalDeviceSynchronization2FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+
+ };
+ static_assert( sizeof( PhysicalDeviceSynchronization2FeaturesKHR ) == sizeof( VkPhysicalDeviceSynchronization2FeaturesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSynchronization2FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2FeaturesKHR>
+ {
+ using Type = PhysicalDeviceSynchronization2FeaturesKHR;
+ };
+
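  // Editor's note: a usage sketch, not part of this diff, for querying the feature struct above. It
  // assumes the default `vk` namespace alias and a Vulkan 1.1+ style getFeatures2 query; names are
  // illustrative. The StructureChain works because a StructExtends specialization for
  // PhysicalDeviceFeatures2 is added elsewhere in this change.
  inline bool supportsSynchronization2( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceSynchronization2FeaturesKHR>();
    return chain.get<vk::PhysicalDeviceSynchronization2FeaturesKHR>().synchronization2 == VK_TRUE;
  }
  // To enable it, chain a PhysicalDeviceSynchronization2FeaturesKHR with synchronization2 = VK_TRUE into
  // DeviceCreateInfo::pNext and request the VK_KHR_synchronization2 device extension.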
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{
static const bool allowDuplicate = false;
@@ -75278,6 +76945,76 @@ namespace VULKAN_HPP_NAMESPACE
};
using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
+ struct QueueFamilyCheckpointProperties2NV
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2Nv;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
+ : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
+ {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
+ return *this;
+ }
+
+
+ operator VkQueueFamilyCheckpointProperties2NV const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
+ }
+
+ operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
+ }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+ auto operator<=>( QueueFamilyCheckpointProperties2NV const& ) const = default;
+#else
+ bool operator==( QueueFamilyCheckpointProperties2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+ }
+
+ bool operator!=( QueueFamilyCheckpointProperties2NV const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2Nv;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {};
+
+ };
+ static_assert( sizeof( QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueueFamilyCheckpointProperties2NV>::value, "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2Nv>
+ {
+ using Type = QueueFamilyCheckpointProperties2NV;
+ };
+
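  // Editor's note: a usage sketch, not part of this diff, for the struct above. It reads the
  // synchronization2-style checkpoint stage mask of a queue family (VK_NV_device_diagnostic_checkpoints);
  // names are illustrative and the StructureChain relies on the QueueFamilyProperties2 StructExtends
  // specialization added elsewhere in this change.
  inline vk::PipelineStageFlags2KHR checkpointStagesForFamily( vk::PhysicalDevice physicalDevice, uint32_t family )
  {
    auto chains = physicalDevice.getQueueFamilyProperties2<
      vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointProperties2NV>>();
    return chains[family].get<vk::QueueFamilyCheckpointProperties2NV>().checkpointExecutionStageMask;
  }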
struct QueueFamilyCheckpointPropertiesNV
{
static const bool allowDuplicate = false;
@@ -81047,6 +82784,21 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
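  // Editor's note: a usage sketch, not part of this diff, for the pipelineBarrier2KHR overloads above.
  // It records an image layout transition with a synchronization2 barrier; `cmd` and `image` are
  // illustrative handles and the stage/access spellings follow VK_KHR_synchronization2 as exposed here.
  inline void transitionToSampled( vk::CommandBuffer cmd, vk::Image image )
  {
    vk::ImageMemoryBarrier2KHR barrier;
    barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput )
           .setSrcAccessMask( vk::AccessFlagBits2KHR::eColorAttachmentWrite )
           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eFragmentShader )
           .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderSampledRead )
           .setOldLayout( vk::ImageLayout::eColorAttachmentOptimal )
           .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
           .setImage( image )
           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

    // DependencyInfoKHR carries all barriers; the stage masks now live on the barrier itself.
    vk::DependencyInfoKHR dependencyInfo;
    dependencyInfo.setImageMemoryBarriers( barrier );  // ArrayProxyNoTemporaries: barrier must be an lvalue
    cmd.pipelineBarrier2KHR( dependencyInfo );
  }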
+ template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
@@ -81106,6 +82858,13 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) );
+ }
+
+
+ template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
@@ -81256,6 +83015,21 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const DependencyInfoKHR & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+ template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
@@ -81583,6 +83357,30 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfoKHR* pDependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdWaitEvents2KHR( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+#else
+ if ( events.size() != dependencyInfos.size() )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+ }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+ d.vkCmdWaitEvents2KHR( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
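  // Editor's note: a usage sketch, not part of this diff, combining the setEvent2KHR and waitEvents2KHR
  // overloads above to split a transfer-to-compute dependency. `event` is an illustrative, already
  // created vk::Event; note that the enhanced waitEvents2KHR requires events.size() == dependencyInfos.size().
  inline void splitTransferToCompute( vk::CommandBuffer cmd, vk::Event event )
  {
    vk::MemoryBarrier2KHR barrier;
    barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eTransfer )
           .setSrcAccessMask( vk::AccessFlagBits2KHR::eTransferWrite )
           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eComputeShader )
           .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderStorageRead );

    vk::DependencyInfoKHR dependencyInfo;
    dependencyInfo.setMemoryBarriers( barrier );

    cmd.setEvent2KHR( event, dependencyInfo );    // signal once the transfer work recorded before this completes
    // ... record unrelated work here ...
    cmd.waitEvents2KHR( event, dependencyInfo );  // one event, one DependencyInfoKHR: the sizes match
  }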
+ template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
@@ -81613,6 +83411,13 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+ }
+
+
+ template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
@@ -81626,6 +83431,13 @@ namespace VULKAN_HPP_NAMESPACE
}
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ }
+
+
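  // Editor's note: a usage sketch, not part of this diff, for writeTimestamp2KHR above. The
  // synchronization2 variant takes a PipelineStageFlags2KHR rather than a single PipelineStageFlagBits.
  // `queryPool` is an illustrative timestamp pool with at least two queries, already reset.
  inline void bracketWithTimestamps( vk::CommandBuffer cmd, vk::QueryPool queryPool )
  {
    cmd.writeTimestamp2KHR( vk::PipelineStageFlagBits2KHR::eTopOfPipe, queryPool, 0 );
    // ... record the work being measured ...
    cmd.writeTimestamp2KHR( vk::PipelineStageFlagBits2KHR::eAllCommands, queryPool, 1 );
  }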
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
@@ -82277,7 +84089,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82322,7 +84134,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82580,7 +84392,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82625,7 +84437,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82856,7 +84668,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82901,7 +84713,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82936,7 +84748,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -82981,7 +84793,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
@@ -83238,7 +85050,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
@@ -83283,7 +85095,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
@@ -89879,6 +91691,39 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
+ VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV* pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointData2NV *>( pCheckpointData ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename CheckpointData2NVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( Dispatch const & d ) const
+ {
+ std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
+ }
+
+ template <typename CheckpointData2NVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type >
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
+ {
+ std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
+ uint32_t checkpointDataCount;
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
+ checkpointData.resize( checkpointDataCount );
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ return checkpointData;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
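  // Editor's note: a usage sketch, not part of this diff, for the getCheckpointData2NV overloads above.
  // After a VK_ERROR_DEVICE_LOST, the markers recorded with setCheckpointNV can be read back this way;
  // it assumes VK_NV_device_diagnostic_checkpoints is enabled alongside VK_KHR_synchronization2.
  inline void dumpCheckpoints( vk::Queue queue )
  {
    std::vector<vk::CheckpointData2NV> checkpoints = queue.getCheckpointData2NV();
    for ( vk::CheckpointData2NV const & cp : checkpoints )
    {
      // cp.stage is the PipelineStageFlags2KHR reached; cp.pCheckpointMarker is the marker passed to setCheckpointNV
      (void) cp;
    }
  }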
+ template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointDataNV *>( pCheckpointData ) );
@@ -90013,6 +91858,22 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+ {
+ Result result = static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
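  // Editor's note: a usage sketch, not part of this diff, showing submit2KHR above with a timeline
  // semaphore: the wait/signal values ride along in SemaphoreSubmitInfoKHR instead of a separate
  // TimelineSemaphoreSubmitInfo pNext chain. Names are illustrative; `timeline` is assumed to be a
  // VK_SEMAPHORE_TYPE_TIMELINE semaphore and the timelineSemaphore feature is assumed to be enabled.
  inline void submitAtTimelineValue( vk::Queue queue, vk::CommandBuffer commandBuffer,
                                     vk::Semaphore timeline, uint64_t waitValue, uint64_t signalValue )
  {
    vk::SemaphoreSubmitInfoKHR     waitInfo( timeline, waitValue, vk::PipelineStageFlagBits2KHR::eAllCommands );
    vk::SemaphoreSubmitInfoKHR     signalInfo( timeline, signalValue, vk::PipelineStageFlagBits2KHR::eAllCommands );
    vk::CommandBufferSubmitInfoKHR cmdInfo( commandBuffer );

    vk::SubmitInfo2KHR submitInfo( {}, waitInfo, cmdInfo, signalInfo );  // enhanced ctor: flags, waits, cmds, signals
    queue.submit2KHR( submitInfo );                                      // fence defaults to a null handle
  }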
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
@@ -90124,6 +91985,7 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<MemoryBarrier2KHR, SubpassDependency2>{ enum { value = true }; };
template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
@@ -90131,6 +91993,7 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorPoolCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo2KHR>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
@@ -90312,6 +92175,8 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+ template <> struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
@@ -90380,6 +92245,7 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+ template <> struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>{ enum { value = true }; };
template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
@@ -90389,6 +92255,7 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+ template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier2KHR>{ enum { value = true }; };
template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
@@ -90420,9 +92287,11 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo2KHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
+ template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2KHR>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
template <> struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>{ enum { value = true }; };
@@ -90631,11 +92500,13 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+ PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
PFN_vkCmdPushConstants vkCmdPushConstants = 0;
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+ PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
PFN_vkCmdResolveImage vkCmdResolveImage = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
@@ -90653,6 +92524,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+ PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
@@ -90681,10 +92553,13 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+ PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+ PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+ PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
@@ -90966,6 +92841,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+ PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
@@ -91010,6 +92886,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
PFN_vkQueueSubmit vkQueueSubmit = 0;
+ PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
@@ -91361,11 +93238,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+ vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+ vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
@@ -91384,6 +93263,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+ vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
@@ -91412,10 +93292,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+ vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+ vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+ vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
@@ -91583,6 +93466,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+ vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
@@ -91623,6 +93507,7 @@ namespace VULKAN_HPP_NAMESPACE
vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+ vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
@@ -91770,11 +93655,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+ vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+ vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
@@ -91793,6 +93680,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+ vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
@@ -91821,10 +93709,13 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+ vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+ vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+ vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
@@ -91992,6 +93883,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+ vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
@@ -92032,6 +93924,7 @@ namespace VULKAN_HPP_NAMESPACE
vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+ vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index 9ba5731..1bbdab8 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -43,7 +43,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 169
+#define VK_HEADER_VERSION 170
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
@@ -600,6 +600,16 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+ VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = 1000314000,
+ VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = 1000314001,
+ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = 1000314002,
+ VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = 1000314003,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = 1000314004,
+ VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = 1000314005,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = 1000314006,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = 1000314007,
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
+ VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 1000325000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
@@ -761,6 +771,8 @@ typedef enum VkImageLayout {
VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+ VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = 1000314000,
+ VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = 1000314001,
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
@@ -1510,6 +1522,7 @@ typedef enum VkAccessFlagBits {
VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+ VK_ACCESS_NONE_KHR = 0,
VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
@@ -1699,6 +1712,7 @@ typedef enum VkPipelineStageFlagBits {
VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000,
VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+ VK_PIPELINE_STAGE_NONE_KHR = 0,
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
@@ -1727,6 +1741,11 @@ typedef enum VkFenceCreateFlagBits {
} VkFenceCreateFlagBits;
typedef VkFlags VkFenceCreateFlags;
typedef VkFlags VkSemaphoreCreateFlags;
+
+typedef enum VkEventCreateFlagBits {
+ VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = 0x00000001,
+ VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkEventCreateFlagBits;
typedef VkFlags VkEventCreateFlags;
typedef enum VkQueryPipelineStatisticFlagBits {
@@ -7521,6 +7540,250 @@ typedef struct VkPipelineLibraryCreateInfoKHR {
#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info"
+#define VK_KHR_synchronization2 1
+typedef uint64_t VkFlags64;
+#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1
+#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2"
+typedef VkFlags64 VkPipelineStageFlags2KHR;
+
+// Flag bits for VkPipelineStageFlags2KHR
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_NONE_KHR = 0;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000;
+static const VkPipelineStageFlags2KHR VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000;
+
+typedef VkFlags64 VkAccessFlags2KHR;
+
+// Flag bits for VkAccessFlags2KHR
+static const VkAccessFlags2KHR VK_ACCESS_2_NONE_KHR = 0;
+static const VkAccessFlags2KHR VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001;
+static const VkAccessFlags2KHR VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002;
+static const VkAccessFlags2KHR VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004;
+static const VkAccessFlags2KHR VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008;
+static const VkAccessFlags2KHR VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100;
+static const VkAccessFlags2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200;
+static const VkAccessFlags2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000;
+static const VkAccessFlags2KHR VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000;
+static const VkAccessFlags2KHR VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000;
+static const VkAccessFlags2KHR VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000;
+static const VkAccessFlags2KHR VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000;
+static const VkAccessFlags2KHR VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000;
+static const VkAccessFlags2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000;
+static const VkAccessFlags2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000;
+static const VkAccessFlags2KHR VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000;
+static const VkAccessFlags2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000;
+
+
+typedef enum VkSubmitFlagBitsKHR {
+ VK_SUBMIT_PROTECTED_BIT_KHR = 0x00000001,
+ VK_SUBMIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSubmitFlagBitsKHR;
+typedef VkFlags VkSubmitFlagsKHR;
+typedef struct VkMemoryBarrier2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineStageFlags2KHR srcStageMask;
+ VkAccessFlags2KHR srcAccessMask;
+ VkPipelineStageFlags2KHR dstStageMask;
+ VkAccessFlags2KHR dstAccessMask;
+} VkMemoryBarrier2KHR;
+
+typedef struct VkBufferMemoryBarrier2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineStageFlags2KHR srcStageMask;
+ VkAccessFlags2KHR srcAccessMask;
+ VkPipelineStageFlags2KHR dstStageMask;
+ VkAccessFlags2KHR dstAccessMask;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize size;
+} VkBufferMemoryBarrier2KHR;
+
+typedef struct VkImageMemoryBarrier2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineStageFlags2KHR srcStageMask;
+ VkAccessFlags2KHR srcAccessMask;
+ VkPipelineStageFlags2KHR dstStageMask;
+ VkAccessFlags2KHR dstAccessMask;
+ VkImageLayout oldLayout;
+ VkImageLayout newLayout;
+ uint32_t srcQueueFamilyIndex;
+ uint32_t dstQueueFamilyIndex;
+ VkImage image;
+ VkImageSubresourceRange subresourceRange;
+} VkImageMemoryBarrier2KHR;
+
+typedef struct VkDependencyInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkDependencyFlags dependencyFlags;
+ uint32_t memoryBarrierCount;
+ const VkMemoryBarrier2KHR* pMemoryBarriers;
+ uint32_t bufferMemoryBarrierCount;
+ const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers;
+ uint32_t imageMemoryBarrierCount;
+ const VkImageMemoryBarrier2KHR* pImageMemoryBarriers;
+} VkDependencyInfoKHR;
+
+typedef struct VkSemaphoreSubmitInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkSemaphore semaphore;
+ uint64_t value;
+ VkPipelineStageFlags2KHR stageMask;
+ uint32_t deviceIndex;
+} VkSemaphoreSubmitInfoKHR;
+
+typedef struct VkCommandBufferSubmitInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkCommandBuffer commandBuffer;
+ uint32_t deviceMask;
+} VkCommandBufferSubmitInfoKHR;
+
+typedef struct VkSubmitInfo2KHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkSubmitFlagsKHR flags;
+ uint32_t waitSemaphoreInfoCount;
+ const VkSemaphoreSubmitInfoKHR* pWaitSemaphoreInfos;
+ uint32_t commandBufferInfoCount;
+ const VkCommandBufferSubmitInfoKHR* pCommandBufferInfos;
+ uint32_t signalSemaphoreInfoCount;
+ const VkSemaphoreSubmitInfoKHR* pSignalSemaphoreInfos;
+} VkSubmitInfo2KHR;
+
+typedef struct VkPhysicalDeviceSynchronization2FeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 synchronization2;
+} VkPhysicalDeviceSynchronization2FeaturesKHR;
+
+typedef struct VkQueueFamilyCheckpointProperties2NV {
+ VkStructureType sType;
+ void* pNext;
+ VkPipelineStageFlags2KHR checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointProperties2NV;
+
+typedef struct VkCheckpointData2NV {
+ VkStructureType sType;
+ void* pNext;
+ VkPipelineStageFlags2KHR stage;
+ void* pCheckpointMarker;
+} VkCheckpointData2NV;
+
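As a hedged illustration of how these structures fit together, the sketch below records an image layout transition using the new per-barrier stage and access masks; the command buffer and image are assumed to come from the application, and in practice the vkCmdPipelineBarrier2KHR entry point is loaded with vkGetDeviceProcAddr since the loader does not export extension commands directly.

#include <vulkan/vulkan_core.h>

/* Hypothetical example: transition an image for color-attachment writes.
 * Requires VK_KHR_synchronization2 to be enabled on the device. */
static void recordLayoutTransition( VkCommandBuffer cmd, VkImage image )
{
    VkImageMemoryBarrier2KHR barrier = {};
    barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR;
    barrier.srcStageMask        = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR;
    barrier.srcAccessMask       = VK_ACCESS_2_NONE_KHR;
    barrier.dstStageMask        = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
    barrier.dstAccessMask       = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
    barrier.oldLayout           = VK_IMAGE_LAYOUT_UNDEFINED;
    barrier.newLayout           = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image               = image;
    barrier.subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

    VkDependencyInfoKHR depInfo = {};
    depInfo.sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
    depInfo.imageMemoryBarrierCount = 1;
    depInfo.pImageMemoryBarriers    = &barrier;

    vkCmdPipelineBarrier2KHR( cmd, &depInfo );
}

Unlike vkCmdPipelineBarrier, the source and destination stages are carried per barrier inside VkDependencyInfoKHR rather than passed as command-level parameters.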
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR(
+ VkCommandBuffer commandBuffer,
+ VkEvent event,
+ const VkDependencyInfoKHR* pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR(
+ VkCommandBuffer commandBuffer,
+ VkEvent event,
+ VkPipelineStageFlags2KHR stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR(
+ VkCommandBuffer commandBuffer,
+ uint32_t eventCount,
+ const VkEvent* pEvents,
+ const VkDependencyInfoKHR* pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR(
+ VkCommandBuffer commandBuffer,
+ const VkDependencyInfoKHR* pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR(
+ VkCommandBuffer commandBuffer,
+ VkPipelineStageFlags2KHR stage,
+ VkQueryPool queryPool,
+ uint32_t query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR(
+ VkQueue queue,
+ uint32_t submitCount,
+ const VkSubmitInfo2KHR* pSubmits,
+ VkFence fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD(
+ VkCommandBuffer commandBuffer,
+ VkPipelineStageFlags2KHR stage,
+ VkBuffer dstBuffer,
+ VkDeviceSize dstOffset,
+ uint32_t marker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
+ VkQueue queue,
+ uint32_t* pCheckpointDataCount,
+ VkCheckpointData2NV* pCheckpointData);
+#endif
+
+
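A hedged end-to-end sketch of the new submission path, showing the per-semaphore stage masks that replace the pWaitDstStageMask array of VkSubmitInfo; all handles are assumed to be created elsewhere, and the vkQueueSubmit2KHR entry point would normally be fetched with vkGetDeviceProcAddr.

#include <vulkan/vulkan_core.h>

/* Hypothetical example: submit one command buffer, waiting on an acquire
 * semaphore at the color-attachment-output stage and signalling a
 * render-finished semaphore when all commands complete. */
static VkResult submitFrame( VkQueue queue, VkCommandBuffer cmd,
                             VkSemaphore acquired, VkSemaphore renderFinished,
                             VkFence fence )
{
    VkSemaphoreSubmitInfoKHR waitInfo = {};
    waitInfo.sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR;
    waitInfo.semaphore = acquired;
    waitInfo.stageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;

    VkSemaphoreSubmitInfoKHR signalInfo = {};
    signalInfo.sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR;
    signalInfo.semaphore = renderFinished;
    signalInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;

    VkCommandBufferSubmitInfoKHR cmdInfo = {};
    cmdInfo.sType         = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR;
    cmdInfo.commandBuffer = cmd;

    VkSubmitInfo2KHR submit = {};
    submit.sType                    = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR;
    submit.waitSemaphoreInfoCount   = 1;
    submit.pWaitSemaphoreInfos      = &waitInfo;
    submit.commandBufferInfoCount   = 1;
    submit.pCommandBufferInfos      = &cmdInfo;
    submit.signalSemaphoreInfoCount = 1;
    submit.pSignalSemaphoreInfos    = &signalInfo;

    return vkQueueSubmit2KHR( queue, 1, &submit, fence );
}

Compared with VkSubmitInfo, each wait operation carries its own stage mask in VkSemaphoreSubmitInfoKHR instead of a parallel pWaitDstStageMask array, and signal operations gain a stage mask as well.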
#define VK_KHR_zero_initialize_workgroup_memory 1
#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1
#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory"