author     Jon Leech <[email protected]>   2022-09-28 21:18:09 -0700
committer  Jon Leech <[email protected]>   2022-09-28 21:18:35 -0700
commit     0df9899602351e03357d3f1895cdfb73acb5d294 (patch)
tree       253bbef0262949ff09046fbf4a72a5f69f8fe2ac /include
parent     39bcdfe388a5a88d34a941a9f75b2fd576da92cc (diff)
Update for Vulkan-Docs 1.3.230 (tag: v1.3.230)
Diffstat (limited to 'include')
-rw-r--r--  include/vk_video/vulkan_video_codec_h264std.h            10
-rw-r--r--  include/vk_video/vulkan_video_codec_h264std_decode.h      7
-rw-r--r--  include/vk_video/vulkan_video_codec_h264std_encode.h      4
-rw-r--r--  include/vk_video/vulkan_video_codec_h265std.h             88
-rw-r--r--  include/vk_video/vulkan_video_codec_h265std_decode.h      8
-rw-r--r--  include/vk_video/vulkan_video_codec_h265std_encode.h      4
-rw-r--r--  include/vulkan/vulkan.hpp                                713
-rw-r--r--  include/vulkan/vulkan_beta.h                               4
-rw-r--r--  include/vulkan/vulkan_core.h                             913
-rw-r--r--  include/vulkan/vulkan_enums.hpp                          654
-rw-r--r--  include/vulkan/vulkan_format_traits.hpp                   24
-rw-r--r--  include/vulkan/vulkan_funcs.hpp                         1090
-rw-r--r--  include/vulkan/vulkan_handles.hpp                        708
-rw-r--r--  include/vulkan/vulkan_hash.hpp                           528
-rw-r--r--  include/vulkan/vulkan_raii.hpp                          1110
-rw-r--r--  include/vulkan/vulkan_static_assertions.hpp              246
-rw-r--r--  include/vulkan/vulkan_structs.hpp                       4569
-rw-r--r--  include/vulkan/vulkan_to_string.hpp                      444
18 files changed, 10967 insertions, 157 deletions
diff --git a/include/vk_video/vulkan_video_codec_h264std.h b/include/vk_video/vulkan_video_codec_h264std.h
index 64b1017..d3ebec6 100644
--- a/include/vk_video/vulkan_video_codec_h264std.h
+++ b/include/vk_video/vulkan_video_codec_h264std.h
@@ -194,6 +194,7 @@ typedef struct StdVideoH264HrdParameters {
uint8_t cpb_cnt_minus1;
uint8_t bit_rate_scale;
uint8_t cpb_size_scale;
+ uint8_t reserved1;
uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
@@ -214,11 +215,12 @@ typedef struct StdVideoH264SequenceParameterSetVui {
uint8_t matrix_coefficients;
uint32_t num_units_in_tick;
uint32_t time_scale;
- const StdVideoH264HrdParameters* pHrdParameters;
uint8_t max_num_reorder_frames;
uint8_t max_dec_frame_buffering;
uint8_t chroma_sample_loc_type_top_field;
uint8_t chroma_sample_loc_type_bottom_field;
+ uint32_t reserved1;
+ const StdVideoH264HrdParameters* pHrdParameters;
} StdVideoH264SequenceParameterSetVui;
typedef struct StdVideoH264SpsFlags {
@@ -251,23 +253,25 @@ typedef struct StdVideoH264SequenceParameterSet {
StdVideoH264SpsFlags flags;
StdVideoH264ProfileIdc profile_idc;
StdVideoH264LevelIdc level_idc;
- uint8_t seq_parameter_set_id;
StdVideoH264ChromaFormatIdc chroma_format_idc;
+ uint8_t seq_parameter_set_id;
uint8_t bit_depth_luma_minus8;
uint8_t bit_depth_chroma_minus8;
uint8_t log2_max_frame_num_minus4;
StdVideoH264PocType pic_order_cnt_type;
- uint8_t log2_max_pic_order_cnt_lsb_minus4;
int32_t offset_for_non_ref_pic;
int32_t offset_for_top_to_bottom_field;
+ uint8_t log2_max_pic_order_cnt_lsb_minus4;
uint8_t num_ref_frames_in_pic_order_cnt_cycle;
uint8_t max_num_ref_frames;
+ uint8_t reserved1;
uint32_t pic_width_in_mbs_minus1;
uint32_t pic_height_in_map_units_minus1;
uint32_t frame_crop_left_offset;
uint32_t frame_crop_right_offset;
uint32_t frame_crop_top_offset;
uint32_t frame_crop_bottom_offset;
+ uint32_t reserved2;
const int32_t* pOffsetForRefFrame;
const StdVideoH264ScalingLists* pScalingLists;
const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui;
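Because this revision inserts reserved fields and moves pHrdParameters to the end of StdVideoH264SequenceParameterSetVui (and reorders the SPS members similarly), code that filled these structs by positional aggregate initialization against the old layout would silently mismatch. A minimal sketch, not part of the commit, using only members visible in the hunk above and placeholder timing values: zero-initialize and assign by name, which is unaffected by the reordering and leaves the new reserved fields at zero.

#include <vulkan/vulkan.h>
#include <vk_video/vulkan_video_codec_h264std.h>

// Zero-initialization keeps reserved1 (and the flags) at zero; named assignment
// is immune to the member reordering introduced in this update.
StdVideoH264SequenceParameterSetVui vui = {};
vui.num_units_in_tick       = 1001;     // placeholder 29.97 fps timing
vui.time_scale              = 60000;
vui.max_num_reorder_frames  = 2;        // placeholder DPB values
vui.max_dec_frame_buffering = 4;
vui.pHrdParameters          = nullptr;  // HRD parameters omitted in this sketch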
diff --git a/include/vk_video/vulkan_video_codec_h264std_decode.h b/include/vk_video/vulkan_video_codec_h264std_decode.h
index 1ee219b..98744f6 100644
--- a/include/vk_video/vulkan_video_codec_h264std_decode.h
+++ b/include/vk_video/vulkan_video_codec_h264std_decode.h
@@ -21,10 +21,10 @@ extern "C" {
#define vulkan_video_codec_h264std_decode 1
// Vulkan 0.9 provisional Vulkan video H.264 decode std specification version number
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_7 VK_MAKE_VIDEO_STD_VERSION(0, 9, 7) // Patch version should always be set to 0
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_7
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_8
#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode"
typedef enum StdVideoDecodeH264FieldOrderCount {
@@ -46,7 +46,8 @@ typedef struct StdVideoDecodeH264PictureInfo {
StdVideoDecodeH264PictureInfoFlags flags;
uint8_t seq_parameter_set_id;
uint8_t pic_parameter_set_id;
- uint16_t reserved;
+ uint8_t reserved1;
+ uint8_t reserved2;
uint16_t frame_num;
uint16_t idr_pic_id;
int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
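The decode std version bump above can be guarded at compile time. A small sketch, relying only on the VK_MAKE_VIDEO_STD_VERSION macro the header already uses:

#include <vk_video/vulkan_video_codec_h264std_decode.h>

// Fails the build if an older (pre-0.9.8) copy of the header is picked up.
static_assert( VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION >=
                   VK_MAKE_VIDEO_STD_VERSION( 0, 9, 8 ),
               "vulkan_video_codec_h264std_decode.h is older than 0.9.8" );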
diff --git a/include/vk_video/vulkan_video_codec_h264std_encode.h b/include/vk_video/vulkan_video_codec_h264std_encode.h
index 6f9b346..76f03eb 100644
--- a/include/vk_video/vulkan_video_codec_h264std_encode.h
+++ b/include/vk_video/vulkan_video_codec_h264std_encode.h
@@ -21,9 +21,9 @@ extern "C" {
#define vulkan_video_codec_h264std_encode 1
// Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_7 VK_MAKE_VIDEO_STD_VERSION(0, 9, 7) // Patch version should always be set to 0
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_7
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8
#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode"
typedef struct StdVideoEncodeH264WeightTableFlags {
uint32_t luma_weight_l0_flag;
diff --git a/include/vk_video/vulkan_video_codec_h265std.h b/include/vk_video/vulkan_video_codec_h265std.h
index baa8ff0..862f881 100644
--- a/include/vk_video/vulkan_video_codec_h265std.h
+++ b/include/vk_video/vulkan_video_codec_h265std.h
@@ -34,12 +34,12 @@ extern "C" {
#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128
#define STD_VIDEO_H265_MAX_DPB_SIZE 16
#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32
-#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19
#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21
-#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15
#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2
+#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16
#define STD_VIDEO_H265_MAX_DELTA_POC 48
@@ -96,6 +96,29 @@ typedef enum StdVideoH265PictureType {
STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} StdVideoH265PictureType;
+
+typedef enum StdVideoH265AspectRatioIdc {
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265AspectRatioIdc;
typedef struct StdVideoH265DecPicBufMgr {
uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
@@ -133,8 +156,9 @@ typedef struct StdVideoH265HrdParameters {
uint8_t dpb_output_delay_length_minus1;
uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
- const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
- const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+ uint16_t reserved[3];
+ const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal;
+ const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl;
} StdVideoH265HrdParameters;
typedef struct StdVideoH265VpsFlags {
@@ -162,9 +186,12 @@ typedef struct StdVideoH265VideoParameterSet {
StdVideoH265VpsFlags flags;
uint8_t vps_video_parameter_set_id;
uint8_t vps_max_sub_layers_minus1;
+ uint8_t reserved1;
+ uint8_t reserved2;
uint32_t vps_num_units_in_tick;
uint32_t vps_time_scale;
uint32_t vps_num_ticks_poc_diff_one_minus1;
+ uint32_t reserved3;
const StdVideoH265DecPicBufMgr* pDecPicBufMgr;
const StdVideoH265HrdParameters* pHrdParameters;
const StdVideoH265ProfileTierLevel* pProfileTierLevel;
@@ -202,7 +229,7 @@ typedef struct StdVideoH265SpsVuiFlags {
typedef struct StdVideoH265SequenceParameterSetVui {
StdVideoH265SpsVuiFlags flags;
- uint8_t aspect_ratio_idc;
+ StdVideoH265AspectRatioIdc aspect_ratio_idc;
uint16_t sar_width;
uint16_t sar_height;
uint8_t video_format;
@@ -211,6 +238,8 @@ typedef struct StdVideoH265SequenceParameterSetVui {
uint8_t matrix_coeffs;
uint8_t chroma_sample_loc_type_top_field;
uint8_t chroma_sample_loc_type_bottom_field;
+ uint8_t reserved1;
+ uint8_t reserved2;
uint16_t def_disp_win_left_offset;
uint16_t def_disp_win_right_offset;
uint16_t def_disp_win_top_offset;
@@ -218,12 +247,13 @@ typedef struct StdVideoH265SequenceParameterSetVui {
uint32_t vui_num_units_in_tick;
uint32_t vui_time_scale;
uint32_t vui_num_ticks_poc_diff_one_minus1;
- const StdVideoH265HrdParameters* pHrdParameters;
uint16_t min_spatial_segmentation_idc;
+ uint16_t reserved3;
uint8_t max_bytes_per_pic_denom;
uint8_t max_bits_per_min_cu_denom;
uint8_t log2_max_mv_length_horizontal;
uint8_t log2_max_mv_length_vertical;
+ const StdVideoH265HrdParameters* pHrdParameters;
} StdVideoH265SequenceParameterSetVui;
typedef struct StdVideoH265PredictorPaletteEntries {
@@ -271,31 +301,33 @@ typedef struct StdVideoH265ShortTermRefPicSetFlags {
typedef struct StdVideoH265ShortTermRefPicSet {
StdVideoH265ShortTermRefPicSetFlags flags;
uint32_t delta_idx_minus1;
- uint32_t abs_delta_rps_minus1;
- uint16_t used_by_curr_pic_flag;
uint16_t use_delta_flag;
- uint32_t num_negative_pics;
- uint32_t num_positive_pics;
- uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+ uint16_t abs_delta_rps_minus1;
+ uint16_t used_by_curr_pic_flag;
uint16_t used_by_curr_pic_s0_flag;
- uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
uint16_t used_by_curr_pic_s1_flag;
+ uint16_t reserved1;
+ uint8_t reserved2;
+ uint8_t reserved3;
+ uint8_t num_negative_pics;
+ uint8_t num_positive_pics;
+ uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+ uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
} StdVideoH265ShortTermRefPicSet;
typedef struct StdVideoH265LongTermRefPicsSps {
- uint8_t num_long_term_ref_pics_sps;
- uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
uint32_t used_by_curr_pic_lt_sps_flag;
+ uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
} StdVideoH265LongTermRefPicsSps;
typedef struct StdVideoH265SequenceParameterSet {
StdVideoH265SpsFlags flags;
+ StdVideoH265ChromaFormatIdc chroma_format_idc;
uint32_t pic_width_in_luma_samples;
uint32_t pic_height_in_luma_samples;
uint8_t sps_video_parameter_set_id;
uint8_t sps_max_sub_layers_minus1;
uint8_t sps_seq_parameter_set_id;
- StdVideoH265ChromaFormatIdc chroma_format_idc;
uint8_t bit_depth_luma_minus8;
uint8_t bit_depth_chroma_minus8;
uint8_t log2_max_pic_order_cnt_lsb_minus4;
@@ -306,10 +338,17 @@ typedef struct StdVideoH265SequenceParameterSet {
uint8_t max_transform_hierarchy_depth_inter;
uint8_t max_transform_hierarchy_depth_intra;
uint8_t num_short_term_ref_pic_sets;
+ uint8_t num_long_term_ref_pics_sps;
uint8_t pcm_sample_bit_depth_luma_minus1;
uint8_t pcm_sample_bit_depth_chroma_minus1;
uint8_t log2_min_pcm_luma_coding_block_size_minus3;
uint8_t log2_diff_max_min_pcm_luma_coding_block_size;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint8_t palette_max_size;
+ uint8_t delta_palette_max_predictor_size;
+ uint8_t motion_vector_resolution_control_idc;
+ uint8_t sps_num_palette_predictor_initializers_minus1;
uint32_t conf_win_left_offset;
uint32_t conf_win_right_offset;
uint32_t conf_win_top_offset;
@@ -317,13 +356,9 @@ typedef struct StdVideoH265SequenceParameterSet {
const StdVideoH265ProfileTierLevel* pProfileTierLevel;
const StdVideoH265DecPicBufMgr* pDecPicBufMgr;
const StdVideoH265ScalingLists* pScalingLists;
- const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet[STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS];
+ const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet;
const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps;
const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui;
- uint8_t palette_max_size;
- uint8_t delta_palette_max_predictor_size;
- uint8_t motion_vector_resolution_control_idc;
- uint8_t sps_num_palette_predictor_initializers_minus1;
const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
} StdVideoH265SequenceParameterSet;
@@ -373,14 +408,9 @@ typedef struct StdVideoH265PictureParameterSet {
uint8_t diff_cu_qp_delta_depth;
int8_t pps_cb_qp_offset;
int8_t pps_cr_qp_offset;
- uint8_t num_tile_columns_minus1;
- uint8_t num_tile_rows_minus1;
- uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
- uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
int8_t pps_beta_offset_div2;
int8_t pps_tc_offset_div2;
uint8_t log2_parallel_merge_level_minus2;
- const StdVideoH265ScalingLists* pScalingLists;
uint8_t log2_max_transform_skip_block_size_minus2;
uint8_t diff_cu_chroma_qp_offset_depth;
uint8_t chroma_qp_offset_list_len_minus1;
@@ -394,6 +424,14 @@ typedef struct StdVideoH265PictureParameterSet {
uint8_t pps_num_palette_predictor_initializers;
uint8_t luma_bit_depth_entry_minus8;
uint8_t chroma_bit_depth_entry_minus8;
+ uint8_t num_tile_columns_minus1;
+ uint8_t num_tile_rows_minus1;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
+ uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
+ uint32_t reserved3;
+ const StdVideoH265ScalingLists* pScalingLists;
const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
} StdVideoH265PictureParameterSet;
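With aspect_ratio_idc retyped from uint8_t to the new StdVideoH265AspectRatioIdc enum and pHrdParameters moved behind the scalar members, filling the H.265 VUI now looks like the following sketch (placeholder sample-aspect-ratio values; members limited to those visible in the hunks above):

#include <vulkan/vulkan.h>
#include <vk_video/vulkan_video_codec_h265std.h>

StdVideoH265SequenceParameterSetVui vui = {};                          // new reserved fields stay zero
vui.aspect_ratio_idc = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR;   // enum instead of a bare uint8_t
vui.sar_width        = 4;                                              // placeholder extended SAR
vui.sar_height       = 3;
vui.pHrdParameters   = nullptr;                                        // pointer now sits at the end of the struct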
diff --git a/include/vk_video/vulkan_video_codec_h265std_decode.h b/include/vk_video/vulkan_video_codec_h265std_decode.h
index 314d846..831c41b 100644
--- a/include/vk_video/vulkan_video_codec_h265std_decode.h
+++ b/include/vk_video/vulkan_video_codec_h265std_decode.h
@@ -21,10 +21,10 @@ extern "C" {
#define vulkan_video_codec_h265std_decode 1
// Vulkan 0.9 provisional Vulkan video H.265 decode std specification version number
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9) // Patch version should always be set to 0
#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_9
#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode"
typedef struct StdVideoDecodeH265PictureInfoFlags {
uint32_t IrapPicFlag : 1;
@@ -38,10 +38,10 @@ typedef struct StdVideoDecodeH265PictureInfo {
uint8_t sps_video_parameter_set_id;
uint8_t pps_seq_parameter_set_id;
uint8_t pps_pic_parameter_set_id;
- uint8_t num_short_term_ref_pic_sets;
+ uint8_t NumDeltaPocsOfRefRpsIdx;
int32_t PicOrderCntVal;
uint16_t NumBitsForSTRefPicSetInSlice;
- uint8_t NumDeltaPocsOfRefRpsIdx;
+ uint16_t reserved;
uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
diff --git a/include/vk_video/vulkan_video_codec_h265std_encode.h b/include/vk_video/vulkan_video_codec_h265std_encode.h
index acfbc6e..84e34e5 100644
--- a/include/vk_video/vulkan_video_codec_h265std_encode.h
+++ b/include/vk_video/vulkan_video_codec_h265std_encode.h
@@ -21,9 +21,9 @@ extern "C" {
#define vulkan_video_codec_h265std_encode 1
// Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9) // Patch version should always be set to 0
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9
#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode"
typedef struct StdVideoEncodeH265WeightTableFlags {
uint16_t luma_weight_l0_flag;
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index dfdcd77..d241c1f 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 229, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 230, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -4976,6 +4976,13 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
}
+ //=== VK_EXT_device_fault ===
+
+ VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo );
+ }
+
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
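The new VK_EXT_device_fault entry point wrapped above is typically invoked after a device-lost error, following the usual two-call pattern. A hedged sketch, not part of the commit: the sType constants and the description member come from the vulkan_core.h half of this update rather than this hunk, so treat them as assumptions, and the extension must have been enabled at device creation.

#include <cstdio>
#include <vulkan/vulkan.h>

// `device` is assumed to have just returned VK_ERROR_DEVICE_LOST.
void reportDeviceFault( VkDevice device )
{
    auto getFaultInfo = reinterpret_cast<PFN_vkGetDeviceFaultInfoEXT>(
        vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
    if ( !getFaultInfo )
        return;

    VkDeviceFaultCountsEXT counts = {};
    counts.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT;
    if ( getFaultInfo( device, &counts, nullptr ) != VK_SUCCESS )   // first call: query counts
        return;

    VkDeviceFaultInfoEXT info = {};
    info.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT;
    // Leaving the address/vendor output arrays null restricts the second call to the
    // textual description.
    if ( getFaultInfo( device, &counts, &info ) == VK_SUCCESS )
        std::printf( "device fault: %s\n", info.description );
}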
@@ -5278,6 +5285,104 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
}
+ //=== VK_EXT_opacity_micromap ===
+
+ VkResult vkCreateMicromapEXT( VkDevice device,
+ const VkMicromapCreateInfoEXT * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap );
+ }
+
+ void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyMicromapEXT( device, micromap, pAllocator );
+ }
+
+ void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos );
+ }
+
+ VkResult vkBuildMicromapsEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos );
+ }
+
+ VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMicromapEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkCopyMicromapToMemoryEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkCopyMemoryToMicromapEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkWriteMicromapsPropertiesEXT( VkDevice device,
+ uint32_t micromapCount,
+ const VkMicromapEXT * pMicromaps,
+ VkQueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride );
+ }
+
+ void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer,
+ uint32_t micromapCount,
+ const VkMicromapEXT * pMicromaps,
+ VkQueryType queryType,
+ VkQueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery );
+ }
+
+ void vkGetDeviceMicromapCompatibilityEXT( VkDevice device,
+ const VkMicromapVersionInfoEXT * pVersionInfo,
+ VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility );
+ }
+
+ void vkGetMicromapBuildSizesEXT( VkDevice device,
+ VkAccelerationStructureBuildTypeKHR buildType,
+ const VkMicromapBuildInfoEXT * pBuildInfo,
+ VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo );
+ }
+
//=== VK_EXT_pageable_device_local_memory ===
void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
@@ -5323,6 +5428,181 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData );
}
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin );
+ }
+
+ void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable );
+ }
+
+ void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode );
+ }
+
+ void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples );
+ }
+
+ void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask );
+ }
+
+ void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable );
+ }
+
+ void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable );
+ }
+
+ void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable );
+ }
+
+ void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables );
+ }
+
+ void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations );
+ }
+
+ void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks );
+ }
+
+ void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream );
+ }
+
+ void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer,
+ VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode );
+ }
+
+ void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize );
+ }
+
+ void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable );
+ }
+
+ void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable );
+ }
+
+ void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced );
+ }
+
+ void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode );
+ }
+
+ void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode );
+ }
+
+ void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable );
+ }
+
+ void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne );
+ }
+
+ void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable );
+ }
+
+ void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer,
+ uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles );
+ }
+
+ void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable );
+ }
+
+ void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation );
+ }
+
+ void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode );
+ }
+
+ void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable );
+ }
+
+ void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer,
+ uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+ }
+
+ void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable );
+ }
+
+ void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable );
+ }
+
+ void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode );
+ }
+
//=== VK_EXT_shader_module_identifier ===
void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
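The VK_EXT_extended_dynamic_state3 commands wrapped above all follow the same shape: one setter per rasterization property, recorded into a command buffer. A brief sketch, assuming the extension is enabled, the bound pipeline declares the matching VK_DYNAMIC_STATE_* values, and `cmd` is in the recording state:

#include <vulkan/vulkan.h>

void setWireframeState( VkDevice device, VkCommandBuffer cmd )
{
    // The dispatch loaders in this diff fetch these entry points the same way.
    auto setPolygonMode = reinterpret_cast<PFN_vkCmdSetPolygonModeEXT>(
        vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
    auto setSamples = reinterpret_cast<PFN_vkCmdSetRasterizationSamplesEXT>(
        vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );

    setPolygonMode( cmd, VK_POLYGON_MODE_LINE );   // switch to wireframe without a new pipeline
    setSamples( cmd, VK_SAMPLE_COUNT_1_BIT );      // single-sampled rasterization
}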
@@ -5337,6 +5617,45 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier );
}
+ //=== VK_NV_optical_flow ===
+
+ VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice,
+ const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties );
+ }
+
+ VkResult vkCreateOpticalFlowSessionNV( VkDevice device,
+ const VkOpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession );
+ }
+
+ void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator );
+ }
+
+ VkResult vkBindOpticalFlowSessionImageNV( VkDevice device,
+ VkOpticalFlowSessionNV session,
+ VkOpticalFlowSessionBindingPointNV bindingPoint,
+ VkImageView view,
+ VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout );
+ }
+
+ void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer,
+ VkOpticalFlowSessionNV session,
+ const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
+ }
+
//=== VK_QCOM_tile_properties ===
VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device,
@@ -9938,6 +10257,40 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_NV_present_barrier ===
+ template <>
+ struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SurfaceCapabilitiesPresentBarrierNV, SurfaceCapabilities2KHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SwapchainPresentBarrierCreateInfoNV, SwapchainCreateInfoKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_KHR_present_id ===
template <>
struct StructExtends<PresentIdKHR, PresentInfoKHR>
@@ -10564,6 +10917,24 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_device_fault ===
+ template <>
+ struct StructExtends<PhysicalDeviceFaultFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceFaultFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_rgba10x6_formats ===
template <>
struct StructExtends<PhysicalDeviceRGBA10X6FormatsFeaturesEXT, PhysicalDeviceFeatures2>
@@ -10654,6 +11025,32 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_device_address_binding_report ===
+ template <>
+ struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<DeviceAddressBindingCallbackDataEXT, DebugUtilsMessengerCallbackDataEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_depth_clip_control ===
template <>
struct StructExtends<PhysicalDeviceDepthClipControlFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11018,6 +11415,40 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_opacity_micromap ===
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<AccelerationStructureTrianglesOpacityMicromapEXT, AccelerationStructureGeometryTrianglesDataKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_border_color_swizzle ===
template <>
struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11220,6 +11651,32 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_extended_dynamic_state3 ===
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3PropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_subpass_merge_feedback ===
template <>
struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11322,6 +11779,56 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_NV_optical_flow ===
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowPropertiesNV, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowImageFormatInfoNV, PhysicalDeviceImageFormatInfo2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowImageFormatInfoNV, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowSessionCreatePrivateDataInfoNV, OpticalFlowSessionCreateInfoNV>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_legacy_dithering ===
template <>
struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11340,6 +11847,24 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_pipeline_protected_access ===
+ template <>
+ struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_QCOM_tile_properties ===
template <>
struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>
@@ -12360,6 +12885,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_compression_control ===
PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+ //=== VK_EXT_device_fault ===
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
@@ -12462,6 +12990,22 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+ //=== VK_EXT_opacity_micromap ===
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
@@ -12474,10 +13018,50 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
//=== VK_EXT_shader_module_identifier ===
PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+ //=== VK_NV_optical_flow ===
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
@@ -13553,6 +14137,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_compression_control ===
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
@@ -13652,6 +14239,22 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
+
//=== VK_EXT_pageable_device_local_memory ===
vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
@@ -13674,11 +14277,57 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) );
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT =
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
+
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT =
PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+ //=== VK_NV_optical_flow ===
+ vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
@@ -14426,6 +15075,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_compression_control ===
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
//=== VK_KHR_ray_tracing_pipeline ===
vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
@@ -14503,6 +15155,22 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
//=== VK_EXT_pageable_device_local_memory ===
vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
@@ -14525,11 +15193,54 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT =
PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+ //=== VK_NV_optical_flow ===
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h
index d20e256..db51102 100644
--- a/include/vulkan/vulkan_beta.h
+++ b/include/vulkan/vulkan_beta.h
@@ -22,7 +22,7 @@ extern "C" {
#define VK_KHR_video_queue 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
-#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 6
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 7
#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
typedef enum VkQueryResultStatusKHR {
@@ -167,7 +167,7 @@ typedef struct VkVideoPictureResourceInfoKHR {
typedef struct VkVideoReferenceSlotInfoKHR {
VkStructureType sType;
const void* pNext;
- int8_t slotIndex;
+ int32_t slotIndex;
const VkVideoPictureResourceInfoKHR* pPictureResource;
} VkVideoReferenceSlotInfoKHR;
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
index f192993..da5c99d 100644
--- a/include/vulkan/vulkan_core.h
+++ b/include/vulkan/vulkan_core.h
@@ -72,7 +72,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 229
+#define VK_HEADER_VERSION 230
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -876,6 +876,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002,
VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000,
+ VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001,
+ VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002,
VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001,
#ifdef VK_ENABLE_BETA_EXTENSIONS
@@ -936,12 +939,17 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000,
+ VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001,
+ VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000,
VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000,
VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001,
VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000,
+ VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000,
@@ -982,6 +990,16 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000,
+ VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000,
+ VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001,
+ VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002,
+ VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003,
+ VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006,
+ VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007,
+ VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000,
VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
@@ -998,6 +1016,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001,
VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000,
VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001,
VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002,
@@ -1007,7 +1027,15 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002,
VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000,
VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000,
@@ -1299,6 +1327,8 @@ typedef enum VkObjectType {
VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
+ VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
+ VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
@@ -1577,6 +1607,7 @@ typedef enum VkFormat {
VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+ VK_FORMAT_R16G16_S10_5_NV = 1000464000,
VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
@@ -1677,6 +1708,8 @@ typedef enum VkQueryType {
VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000,
VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000,
VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001,
+ VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000,
+ VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001,
VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkQueryType;
@@ -1836,6 +1869,37 @@ typedef enum VkDynamicState {
VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000,
VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003,
VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000,
+ VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002,
+ VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003,
+ VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004,
+ VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005,
+ VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006,
+ VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007,
+ VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008,
+ VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009,
+ VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010,
+ VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011,
+ VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012,
+ VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013,
+ VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014,
+ VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015,
+ VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016,
+ VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017,
+ VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018,
+ VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019,
+ VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020,
+ VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021,
+ VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022,
+ VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023,
+ VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024,
+ VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025,
+ VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029,
+ VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030,
+ VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031,
+ VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032,
VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE,
VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE,
VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
@@ -2261,6 +2325,7 @@ typedef enum VkQueueFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040,
#endif
+ VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100,
VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueueFlagBits;
typedef VkFlags VkQueueFlags;
@@ -2411,6 +2476,8 @@ typedef enum VkBufferUsageFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000,
#endif
+ VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000,
+ VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000,
VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
@@ -2470,6 +2537,9 @@ typedef enum VkPipelineCreateFlagBits {
VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000,
VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000,
VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000,
+ VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000,
+ VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000,
+ VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000,
VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
@@ -6468,6 +6538,8 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT =
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL;
typedef VkFlags64 VkAccessFlags2;
@@ -6543,6 +6615,10 @@ static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0
static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL;
static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL;
static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL;
typedef enum VkSubmitFlagBits {
@@ -6641,6 +6717,9 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL;
typedef struct VkPhysicalDeviceVulkan13Features {
VkStructureType sType;
@@ -10845,6 +10924,7 @@ typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002,
VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004,
+ VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008,
VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDebugUtilsMessageTypeFlagBitsEXT;
typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
@@ -11515,6 +11595,8 @@ typedef enum VkGeometryInstanceFlagBitsKHR {
VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002,
VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
+ VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010,
+ VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
@@ -11535,6 +11617,9 @@ typedef enum VkBuildAccelerationStructureFlagBitsKHR {
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
@@ -13354,6 +13439,29 @@ typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT {
#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+#define VK_NV_present_barrier 1
+#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1
+#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier"
+typedef struct VkPhysicalDevicePresentBarrierFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrier;
+} VkPhysicalDevicePresentBarrierFeaturesNV;
+
+typedef struct VkSurfaceCapabilitiesPresentBarrierNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrierSupported;
+} VkSurfaceCapabilitiesPresentBarrierNV;
+
+typedef struct VkSwapchainPresentBarrierCreateInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrierEnable;
+} VkSwapchainPresentBarrierCreateInfoNV;
+
+
+
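A minimal usage sketch for the two VK_NV_present_barrier structs above (not part of the patch). It assumes VK_KHR_get_surface_capabilities2 is enabled, the extension entry points are loaded, and that `physicalDevice`, `surfaceInfo` (a fully filled VkPhysicalDeviceSurfaceInfo2KHR) and `swapchainInfo` (a VkSwapchainCreateInfoKHR) already exist in the caller's code:

    /* Query present-barrier support by chaining the NV struct onto
       VkSurfaceCapabilities2KHR, then opt in at swapchain creation. */
    VkSurfaceCapabilitiesPresentBarrierNV barrierCaps = {
        .sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV };
    VkSurfaceCapabilities2KHR caps2 = {
        .sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, .pNext = &barrierCaps };
    vkGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, &surfaceInfo, &caps2);

    VkSwapchainPresentBarrierCreateInfoNV barrierInfo = {
        .sType                = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV,
        .presentBarrierEnable = VK_TRUE };
    if (barrierCaps.presentBarrierSupported) {
        barrierInfo.pNext   = swapchainInfo.pNext;   /* insert into the existing pNext chain */
        swapchainInfo.pNext = &barrierInfo;
    }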
#define VK_EXT_private_data 1
typedef VkPrivateDataSlot VkPrivateDataSlotEXT;
@@ -13787,6 +13895,83 @@ typedef struct VkPhysicalDevice4444FormatsFeaturesEXT {
+#define VK_EXT_device_fault 1
+#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault"
+
+typedef enum VkDeviceFaultAddressTypeEXT {
+ VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultAddressTypeEXT;
+
+typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT {
+ VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1,
+ VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultVendorBinaryHeaderVersionEXT;
+typedef struct VkPhysicalDeviceFaultFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 deviceFault;
+ VkBool32 deviceFaultVendorBinary;
+} VkPhysicalDeviceFaultFeaturesEXT;
+
+typedef struct VkDeviceFaultCountsEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t addressInfoCount;
+ uint32_t vendorInfoCount;
+ VkDeviceSize vendorBinarySize;
+} VkDeviceFaultCountsEXT;
+
+typedef struct VkDeviceFaultAddressInfoEXT {
+ VkDeviceFaultAddressTypeEXT addressType;
+ VkDeviceAddress reportedAddress;
+ VkDeviceSize addressPrecision;
+} VkDeviceFaultAddressInfoEXT;
+
+typedef struct VkDeviceFaultVendorInfoEXT {
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ uint64_t vendorFaultCode;
+ uint64_t vendorFaultData;
+} VkDeviceFaultVendorInfoEXT;
+
+typedef struct VkDeviceFaultInfoEXT {
+ VkStructureType sType;
+ void* pNext;
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ VkDeviceFaultAddressInfoEXT* pAddressInfos;
+ VkDeviceFaultVendorInfoEXT* pVendorInfos;
+ void* pVendorBinaryData;
+} VkDeviceFaultInfoEXT;
+
+typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT {
+ uint32_t headerSize;
+ VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion;
+ uint32_t vendorID;
+ uint32_t deviceID;
+ uint32_t driverVersion;
+ uint8_t pipelineCacheUUID[VK_UUID_SIZE];
+ uint32_t applicationNameOffset;
+ uint32_t applicationVersion;
+ uint32_t engineNameOffset;
+} VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT(
+ VkDevice device,
+ VkDeviceFaultCountsEXT* pFaultCounts,
+ VkDeviceFaultInfoEXT* pFaultInfo);
+#endif
+
+
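A minimal sketch of the two-call pattern implied by vkGetDeviceFaultInfoEXT (not part of the patch), assuming the `deviceFault` feature is enabled, the entry point has been loaded, `<stdio.h>`/`<stdlib.h>` are included, and the call is made after observing VK_ERROR_DEVICE_LOST:

    /* First call with pFaultInfo == NULL fills in the counts only;
       the second call fills the arrays the caller allocated. */
    VkDeviceFaultCountsEXT counts = { .sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
    vkGetDeviceFaultInfoEXT(device, &counts, NULL);

    VkDeviceFaultInfoEXT info = { .sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
    info.pAddressInfos = calloc(counts.addressInfoCount, sizeof(VkDeviceFaultAddressInfoEXT));
    info.pVendorInfos  = calloc(counts.vendorInfoCount,  sizeof(VkDeviceFaultVendorInfoEXT));

    if (vkGetDeviceFaultInfoEXT(device, &counts, &info) == VK_SUCCESS)
        printf("device fault: %s\n", info.description);

    free(info.pAddressInfos);
    free(info.pVendorInfos);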
#define VK_ARM_rasterization_order_attachment_access 1
#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access"
@@ -13915,6 +14100,38 @@ typedef struct VkPhysicalDeviceDrmPropertiesEXT {
+#define VK_EXT_device_address_binding_report 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report"
+
+typedef enum VkDeviceAddressBindingTypeEXT {
+ VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0,
+ VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1,
+ VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingTypeEXT;
+
+typedef enum VkDeviceAddressBindingFlagBitsEXT {
+ VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001,
+ VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingFlagBitsEXT;
+typedef VkFlags VkDeviceAddressBindingFlagsEXT;
+typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 reportAddressBinding;
+} VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+typedef struct VkDeviceAddressBindingCallbackDataEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceAddressBindingFlagsEXT flags;
+ VkDeviceAddress baseAddress;
+ VkDeviceSize size;
+ VkDeviceAddressBindingTypeEXT bindingType;
+} VkDeviceAddressBindingCallbackDataEXT;
+
+
+
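A minimal sketch (not part of the patch) of how a VK_EXT_debug_utils messenger created with the new VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT message type might pick the binding data out of the callback's pNext chain; the callback wiring itself follows the usual debug-utils pattern and is assumed:

    static VKAPI_ATTR VkBool32 VKAPI_CALL bindingCallback(
        VkDebugUtilsMessageSeverityFlagBitsEXT      severity,
        VkDebugUtilsMessageTypeFlagsEXT             types,
        const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
        void*                                       pUserData)
    {
        if (types & VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT) {
            /* Walk the pNext chain of the callback data for the binding report. */
            for (const VkBaseInStructure* s = (const VkBaseInStructure*)pCallbackData->pNext;
                 s != NULL; s = s->pNext) {
                if (s->sType == VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT) {
                    const VkDeviceAddressBindingCallbackDataEXT* bind =
                        (const VkDeviceAddressBindingCallbackDataEXT*)s;
                    /* bind->baseAddress, bind->size and bind->bindingType describe the
                       bind or unbind event; log or record them as needed. */
                    (void)bind;
                }
            }
        }
        return VK_FALSE;
    }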
#define VK_EXT_depth_clip_control 1
#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1
#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control"
@@ -14244,6 +14461,274 @@ typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT {
+#define VK_EXT_opacity_micromap 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT)
+#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2
+#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap"
+
+typedef enum VkMicromapTypeEXT {
+ VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0,
+ VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapTypeEXT;
+
+typedef enum VkBuildMicromapModeEXT {
+ VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0,
+ VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapModeEXT;
+
+typedef enum VkCopyMicromapModeEXT {
+ VK_COPY_MICROMAP_MODE_CLONE_EXT = 0,
+ VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1,
+ VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2,
+ VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3,
+ VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkCopyMicromapModeEXT;
+
+typedef enum VkOpacityMicromapFormatEXT {
+ VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1,
+ VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2,
+ VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapFormatEXT;
+
+typedef enum VkOpacityMicromapSpecialIndexEXT {
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapSpecialIndexEXT;
+
+typedef enum VkAccelerationStructureCompatibilityKHR {
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCompatibilityKHR;
+
+typedef enum VkAccelerationStructureBuildTypeKHR {
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureBuildTypeKHR;
+
+typedef enum VkBuildMicromapFlagBitsEXT {
+ VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001,
+ VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002,
+ VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004,
+ VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapFlagBitsEXT;
+typedef VkFlags VkBuildMicromapFlagsEXT;
+
+typedef enum VkMicromapCreateFlagBitsEXT {
+ VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001,
+ VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapCreateFlagBitsEXT;
+typedef VkFlags VkMicromapCreateFlagsEXT;
+typedef struct VkMicromapUsageEXT {
+ uint32_t count;
+ uint32_t subdivisionLevel;
+ uint32_t format;
+} VkMicromapUsageEXT;
+
+typedef union VkDeviceOrHostAddressKHR {
+ VkDeviceAddress deviceAddress;
+ void* hostAddress;
+} VkDeviceOrHostAddressKHR;
+
+typedef struct VkMicromapBuildInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapTypeEXT type;
+ VkBuildMicromapFlagsEXT flags;
+ VkBuildMicromapModeEXT mode;
+ VkMicromapEXT dstMicromap;
+ uint32_t usageCountsCount;
+ const VkMicromapUsageEXT* pUsageCounts;
+ const VkMicromapUsageEXT* const* ppUsageCounts;
+ VkDeviceOrHostAddressConstKHR data;
+ VkDeviceOrHostAddressKHR scratchData;
+ VkDeviceOrHostAddressConstKHR triangleArray;
+ VkDeviceSize triangleArrayStride;
+} VkMicromapBuildInfoEXT;
+
+typedef struct VkMicromapCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapCreateFlagsEXT createFlags;
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize size;
+ VkMicromapTypeEXT type;
+ VkDeviceAddress deviceAddress;
+} VkMicromapCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 micromap;
+ VkBool32 micromapCaptureReplay;
+ VkBool32 micromapHostCommands;
+} VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxOpacity2StateSubdivisionLevel;
+ uint32_t maxOpacity4StateSubdivisionLevel;
+} VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+typedef struct VkMicromapVersionInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ const uint8_t* pVersionData;
+} VkMicromapVersionInfoEXT;
+
+typedef struct VkCopyMicromapToMemoryInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapEXT src;
+ VkDeviceOrHostAddressKHR dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMicromapToMemoryInfoEXT;
+
+typedef struct VkCopyMemoryToMicromapInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceOrHostAddressConstKHR src;
+ VkMicromapEXT dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMemoryToMicromapInfoEXT;
+
+typedef struct VkCopyMicromapInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapEXT src;
+ VkMicromapEXT dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMicromapInfoEXT;
+
+typedef struct VkMicromapBuildSizesInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize micromapSize;
+ VkDeviceSize buildScratchSize;
+ VkBool32 discardable;
+} VkMicromapBuildSizesInfoEXT;
+
+typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkIndexType indexType;
+ VkDeviceOrHostAddressConstKHR indexBuffer;
+ VkDeviceSize indexStride;
+ uint32_t baseTriangle;
+ uint32_t usageCountsCount;
+ const VkMicromapUsageEXT* pUsageCounts;
+ const VkMicromapUsageEXT* const* ppUsageCounts;
+ VkMicromapEXT micromap;
+} VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+typedef struct VkMicromapTriangleEXT {
+ uint32_t dataOffset;
+ uint16_t subdivisionLevel;
+ uint16_t format;
+} VkMicromapTriangleEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap);
+typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT(
+ VkDevice device,
+ const VkMicromapCreateInfoEXT* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkMicromapEXT* pMicromap);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT(
+ VkDevice device,
+ VkMicromapEXT micromap,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT* pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT* pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapToMemoryInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMemoryToMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT(
+ VkDevice device,
+ uint32_t micromapCount,
+ const VkMicromapEXT* pMicromaps,
+ VkQueryType queryType,
+ size_t dataSize,
+ void* pData,
+ size_t stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMicromapToMemoryInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMemoryToMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t micromapCount,
+ const VkMicromapEXT* pMicromaps,
+ VkQueryType queryType,
+ VkQueryPool queryPool,
+ uint32_t firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT(
+ VkDevice device,
+ const VkMicromapVersionInfoEXT* pVersionInfo,
+ VkAccelerationStructureCompatibilityKHR* pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT(
+ VkDevice device,
+ VkAccelerationStructureBuildTypeKHR buildType,
+ const VkMicromapBuildInfoEXT* pBuildInfo,
+ VkMicromapBuildSizesInfoEXT* pSizeInfo);
+#endif
+
+
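A minimal sketch of the micromap build flow declared above (not part of the patch). It mirrors the acceleration-structure flow: size the build, create the micromap on a buffer, then record the build. The buffer (created with VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT), the scratch and input device addresses, and the triangle/opacity data are assumed to be prepared by the caller; the subdivision level and 4-state format are arbitrary example values, and error checks are omitted:

    void build_opacity_micromap(VkDevice device, VkCommandBuffer cmd,
                                VkBuffer micromapBuffer, uint32_t triangleCount,
                                VkDeviceAddress dataAddr, VkDeviceAddress triangleAddr,
                                VkDeviceAddress scratchAddr, VkMicromapEXT* outMicromap)
    {
        VkMicromapUsageEXT usage = {
            .count            = triangleCount,
            .subdivisionLevel = 3,
            .format           = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT };

        VkMicromapBuildInfoEXT build = {
            .sType            = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
            .type             = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT,
            .mode             = VK_BUILD_MICROMAP_MODE_BUILD_EXT,
            .usageCountsCount = 1,
            .pUsageCounts     = &usage };

        /* Size the build, then create the micromap object on the provided buffer. */
        VkMicromapBuildSizesInfoEXT sizes = { .sType = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT };
        vkGetMicromapBuildSizesEXT(device, VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, &build, &sizes);

        VkMicromapCreateInfoEXT createInfo = {
            .sType  = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
            .buffer = micromapBuffer,
            .size   = sizes.micromapSize,
            .type   = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT };
        vkCreateMicromapEXT(device, &createInfo, NULL, outMicromap);

        /* Record the device build; scratch must be at least sizes.buildScratchSize bytes. */
        build.dstMicromap                 = *outMicromap;
        build.data.deviceAddress          = dataAddr;
        build.triangleArray.deviceAddress = triangleAddr;
        build.triangleArrayStride         = sizeof(VkMicromapTriangleEXT);
        build.scratchData.deviceAddress   = scratchAddr;
        vkCmdBuildMicromapsEXT(cmd, 1, &build);
    }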
#define VK_EXT_load_store_op_none 1
#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1
#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none"
@@ -14429,6 +14914,239 @@ typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM {
+#define VK_EXT_extended_dynamic_state3 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3"
+typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 extendedDynamicState3TessellationDomainOrigin;
+ VkBool32 extendedDynamicState3DepthClampEnable;
+ VkBool32 extendedDynamicState3PolygonMode;
+ VkBool32 extendedDynamicState3RasterizationSamples;
+ VkBool32 extendedDynamicState3SampleMask;
+ VkBool32 extendedDynamicState3AlphaToCoverageEnable;
+ VkBool32 extendedDynamicState3AlphaToOneEnable;
+ VkBool32 extendedDynamicState3LogicOpEnable;
+ VkBool32 extendedDynamicState3ColorBlendEnable;
+ VkBool32 extendedDynamicState3ColorBlendEquation;
+ VkBool32 extendedDynamicState3ColorWriteMask;
+ VkBool32 extendedDynamicState3RasterizationStream;
+ VkBool32 extendedDynamicState3ConservativeRasterizationMode;
+ VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize;
+ VkBool32 extendedDynamicState3DepthClipEnable;
+ VkBool32 extendedDynamicState3SampleLocationsEnable;
+ VkBool32 extendedDynamicState3ColorBlendAdvanced;
+ VkBool32 extendedDynamicState3ProvokingVertexMode;
+ VkBool32 extendedDynamicState3LineRasterizationMode;
+ VkBool32 extendedDynamicState3LineStippleEnable;
+ VkBool32 extendedDynamicState3DepthClipNegativeOneToOne;
+ VkBool32 extendedDynamicState3ViewportWScalingEnable;
+ VkBool32 extendedDynamicState3ViewportSwizzle;
+ VkBool32 extendedDynamicState3CoverageToColorEnable;
+ VkBool32 extendedDynamicState3CoverageToColorLocation;
+ VkBool32 extendedDynamicState3CoverageModulationMode;
+ VkBool32 extendedDynamicState3CoverageModulationTableEnable;
+ VkBool32 extendedDynamicState3CoverageModulationTable;
+ VkBool32 extendedDynamicState3CoverageReductionMode;
+ VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable;
+ VkBool32 extendedDynamicState3ShadingRateImageEnable;
+} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 dynamicPrimitiveTopologyUnrestricted;
+} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+typedef struct VkColorBlendEquationEXT {
+ VkBlendFactor srcColorBlendFactor;
+ VkBlendFactor dstColorBlendFactor;
+ VkBlendOp colorBlendOp;
+ VkBlendFactor srcAlphaBlendFactor;
+ VkBlendFactor dstAlphaBlendFactor;
+ VkBlendOp alphaBlendOp;
+} VkColorBlendEquationEXT;
+
+typedef struct VkColorBlendAdvancedEXT {
+ VkBlendOp advancedBlendOp;
+ VkBool32 srcPremultiplied;
+ VkBool32 dstPremultiplied;
+ VkBlendOverlapEXT blendOverlap;
+ VkBool32 clampResults;
+} VkColorBlendAdvancedEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream);
+typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT(
+ VkCommandBuffer commandBuffer,
+ VkTessellationDomainOrigin domainOrigin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthClampEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkPolygonMode polygonMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT(
+ VkCommandBuffer commandBuffer,
+ VkSampleCountFlagBits rasterizationSamples);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT(
+ VkCommandBuffer commandBuffer,
+ VkSampleCountFlagBits samples,
+ const VkSampleMask* pSampleMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 alphaToCoverageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 alphaToOneEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 logicOpEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkBool32* pColorBlendEnables);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendEquationEXT* pColorBlendEquations);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorComponentFlags* pColorWriteMasks);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t rasterizationStream);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT(
+ VkCommandBuffer commandBuffer,
+ float extraPrimitiveOverestimationSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthClipEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 sampleLocationsEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkProvokingVertexModeEXT provokingVertexMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkLineRasterizationModeEXT lineRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 stippledLineEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 negativeOneToOne);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 viewportWScalingEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkViewportSwizzleNV* pViewportSwizzles);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 coverageToColorEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t coverageToColorLocation);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV(
+ VkCommandBuffer commandBuffer,
+ VkCoverageModulationModeNV coverageModulationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 coverageModulationTableEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t coverageModulationTableCount,
+ const float* pCoverageModulationTable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 shadingRateImageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 representativeFragmentTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV(
+ VkCommandBuffer commandBuffer,
+ VkCoverageReductionModeNV coverageReductionMode);
+#endif
+
+
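A minimal record-time sketch for a few of the commands above (not part of the patch). It assumes the corresponding VK_DYNAMIC_STATE_*_EXT values were listed in the bound pipeline's VkPipelineDynamicStateCreateInfo, the matching extendedDynamicState3* features are enabled, and `commandBuffer` is in the recording state; the specific values are arbitrary examples:

    vkCmdSetPolygonModeEXT(commandBuffer, VK_POLYGON_MODE_LINE);
    vkCmdSetRasterizationSamplesEXT(commandBuffer, VK_SAMPLE_COUNT_4_BIT);

    const VkSampleMask sampleMask = 0xF;               /* one mask word covers 4 samples */
    vkCmdSetSampleMaskEXT(commandBuffer, VK_SAMPLE_COUNT_4_BIT, &sampleMask);

    /* Per-attachment blend state for color attachment 0. */
    const VkBool32 blendEnable = VK_TRUE;
    VkColorBlendEquationEXT blendEq = {
        .srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA,
        .dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        .colorBlendOp        = VK_BLEND_OP_ADD,
        .srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE,
        .dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO,
        .alphaBlendOp        = VK_BLEND_OP_ADD };
    vkCmdSetColorBlendEnableEXT(commandBuffer, 0, 1, &blendEnable);
    vkCmdSetColorBlendEquationEXT(commandBuffer, 0, 1, &blendEq);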
#define VK_EXT_subpass_merge_feedback 1
#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2
#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback"
@@ -14537,6 +15255,170 @@ VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT(
#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access"
+#define VK_NV_optical_flow 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV)
+#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1
+#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow"
+
+typedef enum VkOpticalFlowPerformanceLevelNV {
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowPerformanceLevelNV;
+
+typedef enum VkOpticalFlowSessionBindingPointNV {
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionBindingPointNV;
+
+typedef enum VkOpticalFlowGridSizeFlagBitsNV {
+ VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowGridSizeFlagBitsNV;
+typedef VkFlags VkOpticalFlowGridSizeFlagsNV;
+
+typedef enum VkOpticalFlowUsageFlagBitsNV {
+ VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010,
+ VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowUsageFlagBitsNV;
+typedef VkFlags VkOpticalFlowUsageFlagsNV;
+
+typedef enum VkOpticalFlowSessionCreateFlagBitsNV {
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010,
+ VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionCreateFlagBitsNV;
+typedef VkFlags VkOpticalFlowSessionCreateFlagsNV;
+
+typedef enum VkOpticalFlowExecuteFlagBitsNV {
+ VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowExecuteFlagBitsNV;
+typedef VkFlags VkOpticalFlowExecuteFlagsNV;
+typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 opticalFlow;
+} VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes;
+ VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes;
+ VkBool32 hintSupported;
+ VkBool32 costSupported;
+ VkBool32 bidirectionalFlowSupported;
+ VkBool32 globalFlowSupported;
+ uint32_t minWidth;
+ uint32_t minHeight;
+ uint32_t maxWidth;
+ uint32_t maxHeight;
+ uint32_t maxNumRegionsOfInterest;
+} VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+typedef struct VkOpticalFlowImageFormatInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkOpticalFlowUsageFlagsNV usage;
+} VkOpticalFlowImageFormatInfoNV;
+
+typedef struct VkOpticalFlowImageFormatPropertiesNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkFormat format;
+} VkOpticalFlowImageFormatPropertiesNV;
+
+typedef struct VkOpticalFlowSessionCreateInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t width;
+ uint32_t height;
+ VkFormat imageFormat;
+ VkFormat flowVectorFormat;
+ VkFormat costFormat;
+ VkOpticalFlowGridSizeFlagsNV outputGridSize;
+ VkOpticalFlowGridSizeFlagsNV hintGridSize;
+ VkOpticalFlowPerformanceLevelNV performanceLevel;
+ VkOpticalFlowSessionCreateFlagsNV flags;
+} VkOpticalFlowSessionCreateInfoNV;
+
+typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t id;
+ uint32_t size;
+ const void* pPrivateData;
+} VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+typedef struct VkOpticalFlowExecuteInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ VkOpticalFlowExecuteFlagsNV flags;
+ uint32_t regionCount;
+ const VkRect2D* pRegions;
+} VkOpticalFlowExecuteInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout);
+typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ VkPhysicalDevice physicalDevice,
+ const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo,
+ uint32_t* pFormatCount,
+ VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV(
+ VkDevice device,
+ const VkOpticalFlowSessionCreateInfoNV* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkOpticalFlowSessionNV* pSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV(
+ VkDevice device,
+ VkOpticalFlowSessionNV session,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV(
+ VkDevice device,
+ VkOpticalFlowSessionNV session,
+ VkOpticalFlowSessionBindingPointNV bindingPoint,
+ VkImageView view,
+ VkImageLayout layout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV(
+ VkCommandBuffer commandBuffer,
+ VkOpticalFlowSessionNV session,
+ const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+#endif
+
+
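A minimal sketch of the optical-flow session lifecycle declared above (not part of the patch): create a session, bind input/reference/output image views, then record the execute. The image views, their layouts, and the formats are assumed to have been chosen from vkGetPhysicalDeviceOpticalFlowImageFormatsNV beforehand; the NV12-style image format and 1080p size are example values only, and error handling is omitted:

    VkOpticalFlowSessionCreateInfoNV sessionInfo = {
        .sType            = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
        .width            = 1920,
        .height           = 1080,
        .imageFormat      = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
        .flowVectorFormat = VK_FORMAT_R16G16_S10_5_NV,
        .outputGridSize   = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
        .performanceLevel = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV };
    VkOpticalFlowSessionNV session;
    vkCreateOpticalFlowSessionNV(device, &sessionInfo, NULL, &session);

    /* Bind the frame pair and the flow-vector output to the session. */
    vkBindOpticalFlowSessionImageNV(device, session, VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV,
                                    inputView, VK_IMAGE_LAYOUT_GENERAL);
    vkBindOpticalFlowSessionImageNV(device, session, VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV,
                                    referenceView, VK_IMAGE_LAYOUT_GENERAL);
    vkBindOpticalFlowSessionImageNV(device, session, VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV,
                                    flowVectorView, VK_IMAGE_LAYOUT_GENERAL);

    VkOpticalFlowExecuteInfoNV executeInfo = { .sType = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV };
    vkCmdOpticalFlowExecuteNV(commandBuffer, session, &executeInfo);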
#define VK_EXT_legacy_dithering 1
#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1
#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering"
@@ -14548,6 +15430,17 @@ typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT {
+#define VK_EXT_pipeline_protected_access 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access"
+typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 pipelineProtectedAccess;
+} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+
+
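A minimal sketch (not part of the patch) of enabling the feature above and using the new VkPipelineCreateFlagBits added earlier in this change; `deviceCreateInfo` and `graphicsPipelineCreateInfo` are assumed to exist and to have no other pNext chain:

    VkPhysicalDevicePipelineProtectedAccessFeaturesEXT protectedAccess = {
        .sType                   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
        .pipelineProtectedAccess = VK_TRUE };
    deviceCreateInfo.pNext = &protectedAccess;   /* chain onto VkDeviceCreateInfo */

    /* Declare per pipeline that it never touches protected resources. */
    graphicsPipelineCreateInfo.flags |= VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT;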
#define VK_QCOM_tile_properties 1
#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1
#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties"
@@ -14616,30 +15509,12 @@ typedef enum VkBuildAccelerationStructureModeKHR {
VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkBuildAccelerationStructureModeKHR;
-typedef enum VkAccelerationStructureBuildTypeKHR {
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkAccelerationStructureBuildTypeKHR;
-
-typedef enum VkAccelerationStructureCompatibilityKHR {
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkAccelerationStructureCompatibilityKHR;
-
typedef enum VkAccelerationStructureCreateFlagBitsKHR {
VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001,
VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004,
VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkAccelerationStructureCreateFlagBitsKHR;
typedef VkFlags VkAccelerationStructureCreateFlagsKHR;
-typedef union VkDeviceOrHostAddressKHR {
- VkDeviceAddress deviceAddress;
- void* hostAddress;
-} VkDeviceOrHostAddressKHR;
-
typedef struct VkAccelerationStructureBuildRangeInfoKHR {
uint32_t primitiveCount;
uint32_t primitiveOffset;
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
index c8602a5..99eef8d 100644
--- a/include/vulkan/vulkan_enums.hpp
+++ b/include/vulkan/vulkan_enums.hpp
@@ -692,6 +692,9 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+ ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV,
+ eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV,
+ eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV,
ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -746,6 +749,9 @@ namespace VULKAN_HPP_NAMESPACE
eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT,
ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT,
ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+ ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT,
+ eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT,
+ eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT,
ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
@@ -754,6 +760,8 @@ namespace VULKAN_HPP_NAMESPACE
eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT,
+ ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT,
+ eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT,
ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
@@ -798,6 +806,16 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
+ eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
+ eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
+ eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
+ eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT,
+ eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT,
+ ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT,
+ ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT,
+ eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
+ eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT,
+ eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT,
ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT,
eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
@@ -814,6 +832,8 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM,
eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM,
+ ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT,
+ ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT,
ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT,
eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT,
eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT,
@@ -823,7 +843,15 @@ namespace VULKAN_HPP_NAMESPACE
ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT,
eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT,
ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+ ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV,
+ ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV,
+ eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV,
+ eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV,
+ eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
+ eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV,
+ eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
+ ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
@@ -1061,6 +1089,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
+ eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
@@ -1325,6 +1355,7 @@ namespace VULKAN_HPP_NAMESPACE
ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+ eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
@@ -1552,8 +1583,9 @@ namespace VULKAN_HPP_NAMESPACE
eProtected = VK_QUEUE_PROTECTED_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
- eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR
+ eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV
};
enum class SampleCountFlagBits : VkSampleCountFlags
@@ -1716,7 +1748,9 @@ namespace VULKAN_HPP_NAMESPACE
eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT,
ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT,
eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR,
- eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR
+ eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR,
+ eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT,
+ eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT
};
enum class QueryPoolCreateFlagBits
@@ -1760,9 +1794,11 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
- eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
- eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
+ eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+ eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT,
+ eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+ eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+ eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
};
enum class SharingMode
@@ -1960,58 +1996,89 @@ namespace VULKAN_HPP_NAMESPACE
enum class DynamicState
{
- eViewport = VK_DYNAMIC_STATE_VIEWPORT,
- eScissor = VK_DYNAMIC_STATE_SCISSOR,
- eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
- eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
- eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
- eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
- eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
- eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
- eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
- eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
- eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
- ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
- eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
- eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
- eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
- eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
- eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
- eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
- eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
- eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
- eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
- eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
- eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
- ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
- eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
- eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
- eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
- eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
- eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
- eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
- eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
- eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
- eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
- eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
- ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
- eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
- eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
- eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
- eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
- eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
- eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
- eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
- eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
- eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
- ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
- ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
- eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
- eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
- eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
- eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
- eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
- eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
+ eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+ eScissor = VK_DYNAMIC_STATE_SCISSOR,
+ eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+ eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+ eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+ eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+ eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+ eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+ eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+ eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
+ eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
+ ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+ eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+ eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+ eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+ eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+ eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+ eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+ eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+ eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+ eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
+ eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+ eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+ ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+ eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+ eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+ eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+ eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
+ eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+ eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+ eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+ eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
+ eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+ eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
+ ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
+ eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
+ eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
+ eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT,
+ eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT,
+ ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT,
+ eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT,
+ eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT,
+ eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT,
+ eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT,
+ eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT,
+ eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT,
+ eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT,
+ eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT,
+ eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT,
+ eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT,
+ eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT,
+ eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT,
+ eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT,
+ eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT,
+ eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT,
+ eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT,
+ eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT,
+ eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT,
+ eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV,
+ eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV,
+ eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV,
+ eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV,
+ eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV,
+ eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV,
+ eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV,
+ eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV,
+ eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV,
+ eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV,
+ eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+ eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
+ eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+ eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+ eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+ eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+ eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+ ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
+ ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+ eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
+ eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+ eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
+ eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+ eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+ eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
};
enum class FrontFace
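The new VK_EXT_extended_dynamic_state3 values plug into the existing dynamic-state mechanism: they are listed in the pipeline's dynamic-state create info, and the corresponding state is then set at command-buffer record time. A brief vulkan.hpp sketch (C++14 or later assumed; the names are illustrative, and the matching features from VkPhysicalDeviceExtendedDynamicState3FeaturesEXT must have been enabled at device creation):

    #include <array>
    #include <vulkan/vulkan.hpp>

    // Two of the newly dynamic pieces of state, declared on the pipeline.
    static const std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::ePolygonModeEXT,
                                                                   vk::DynamicState::eRasterizationSamplesEXT };

    static vk::PipelineDynamicStateCreateInfo makeDynamicStateInfo()
    {
        vk::PipelineDynamicStateCreateInfo info;
        info.dynamicStateCount = static_cast<uint32_t>( dynamicStates.size() );
        info.pDynamicStates    = dynamicStates.data();
        return info;  // referenced from vk::GraphicsPipelineCreateInfo::pDynamicState
    }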
@@ -2068,6 +2135,9 @@ namespace VULKAN_HPP_NAMESPACE
eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV,
eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+ eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT,
+ eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT,
+ eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT,
eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
@@ -2751,6 +2821,8 @@ namespace VULKAN_HPP_NAMESPACE
eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI,
eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,
eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR,
+ eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT,
+ eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV,
eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
@@ -2802,6 +2874,10 @@ namespace VULKAN_HPP_NAMESPACE
eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI,
eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR,
+ eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT,
+ eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT,
+ eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV,
+ eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV,
eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
@@ -2869,6 +2945,9 @@ namespace VULKAN_HPP_NAMESPACE
eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
+ eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
+ eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
+ eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV,
eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
};
using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2;
@@ -3530,9 +3609,10 @@ namespace VULKAN_HPP_NAMESPACE
enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
{
- eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
- eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
- ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+ eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+ eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+ eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT
};
enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT
@@ -3588,6 +3668,8 @@ namespace VULKAN_HPP_NAMESPACE
eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+ eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT,
+ eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT,
eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV
@@ -3596,12 +3678,15 @@ namespace VULKAN_HPP_NAMESPACE
enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
{
- eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
- eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
- ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
- ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
- eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
- eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV
+ eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+ eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+ ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+ ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+ eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+ eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV,
+ eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT,
+ eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT,
+ eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT
};
using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
@@ -4127,6 +4212,24 @@ namespace VULKAN_HPP_NAMESPACE
e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT
};
+ //=== VK_EXT_device_fault ===
+
+ enum class DeviceFaultAddressTypeEXT
+ {
+ eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT,
+ eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT,
+ eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT,
+ eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT,
+ eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT,
+ eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT,
+ eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT
+ };
+
+ enum class DeviceFaultVendorBinaryHeaderVersionEXT
+ {
+ eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT
+ };
+
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
@@ -4153,6 +4256,19 @@ namespace VULKAN_HPP_NAMESPACE
eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
};
+ //=== VK_EXT_device_address_binding_report ===
+
+ enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT
+ {
+ eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT
+ };
+
+ enum class DeviceAddressBindingTypeEXT
+ {
+ eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT,
+ eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT
+ };
+
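These values, together with the eDeviceAddressBinding message type added above, work through the regular debug-utils messenger: with VK_EXT_device_address_binding_report enabled and its feature turned on, bind and unbind events are delivered to the callback, with a VkDeviceAddressBindingCallbackDataEXT chained into the callback data. A sketch under those assumptions; the callback and helper names are hypothetical:

    #include <vulkan/vulkan.hpp>

    // Minimal callback; a real one would walk pCallbackData->pNext for
    // VkDeviceAddressBindingCallbackDataEXT when the type bit is eDeviceAddressBinding.
    VKAPI_ATTR VkBool32 VKAPI_CALL onDebugMessage( VkDebugUtilsMessageSeverityFlagBitsEXT,
                                                   VkDebugUtilsMessageTypeFlagsEXT,
                                                   const VkDebugUtilsMessengerCallbackDataEXT *,
                                                   void * )
    {
        return VK_FALSE;
    }

    vk::DebugUtilsMessengerCreateInfoEXT makeMessengerInfo()
    {
        vk::DebugUtilsMessengerCreateInfoEXT info;
        info.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eInfo;
        info.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding;
        info.pfnUserCallback = &onDebugMessage;
        return info;
    }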
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -4178,6 +4294,52 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_EXT_opacity_micromap ===
+
+ enum class MicromapTypeEXT
+ {
+ eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT
+ };
+
+ enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT
+ {
+ ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT,
+ ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT,
+ eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT
+ };
+
+ enum class CopyMicromapModeEXT
+ {
+ eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT,
+ eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT,
+ eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT,
+ eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT
+ };
+
+ enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT
+ {
+ eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
+ };
+
+ enum class BuildMicromapModeEXT
+ {
+ eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT
+ };
+
+ enum class OpacityMicromapFormatEXT
+ {
+ e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT,
+ e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT
+ };
+
+ enum class OpacityMicromapSpecialIndexEXT
+ {
+ eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT,
+ eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT,
+ eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT,
+ eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT
+ };
+
//=== VK_EXT_subpass_merge_feedback ===
enum class SubpassMergeStatusEXT
@@ -4214,6 +4376,62 @@ namespace VULKAN_HPP_NAMESPACE
eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
};
+ //=== VK_NV_optical_flow ===
+
+ enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV,
+ eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV,
+ eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV,
+ eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV,
+ eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV,
+ eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV
+ };
+
+ enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV,
+ e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV,
+ e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV,
+ e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
+ e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV
+ };
+
+ enum class OpticalFlowPerformanceLevelNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV,
+ eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV,
+ eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV,
+ eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV
+ };
+
+ enum class OpticalFlowSessionBindingPointNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV,
+ eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV,
+ eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV,
+ eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV,
+ eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV,
+ eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV,
+ eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV,
+ eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV,
+ eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV
+ };
+
+ enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV
+ {
+ eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV,
+ eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV,
+ eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV,
+ eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV,
+ eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV
+ };
+
+ enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV
+ {
+ eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV
+ };
+
template <typename T>
struct IndexTypeValue
{
@@ -4505,6 +4723,7 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | VkFlags( QueueFlagBits::eOpticalFlowNV )
};
};
@@ -4933,6 +5152,7 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | VkFlags( BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) | VkFlags( BufferUsageFlagBits::eMicromapStorageEXT )
};
};
@@ -5143,7 +5363,9 @@ namespace VULKAN_HPP_NAMESPACE
VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) |
VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | VkFlags( PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) |
VkFlags( PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) | VkFlags( PipelineCreateFlagBits::eRayTracingAllowMotionNV ) |
- VkFlags( PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) | VkFlags( PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT )
+ VkFlags( PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) |
+ VkFlags( PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) | VkFlags( PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) |
+ VkFlags( PipelineCreateFlagBits::eNoProtectedAccessEXT ) | VkFlags( PipelineCreateFlagBits::eProtectedAccessOnlyEXT )
};
};
@@ -6448,7 +6670,8 @@ namespace VULKAN_HPP_NAMESPACE
VkFlags64( PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) | VkFlags64( PipelineStageFlagBits2::eRayTracingShaderKHR ) |
VkFlags64( PipelineStageFlagBits2::eFragmentDensityProcessEXT ) | VkFlags64( PipelineStageFlagBits2::eTaskShaderEXT ) |
VkFlags64( PipelineStageFlagBits2::eMeshShaderEXT ) | VkFlags64( PipelineStageFlagBits2::eSubpassShadingHUAWEI ) |
- VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI ) | VkFlags64( PipelineStageFlagBits2::eAccelerationStructureCopyKHR )
+ VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI ) | VkFlags64( PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) |
+ VkFlags64( PipelineStageFlagBits2::eMicromapBuildEXT ) | VkFlags64( PipelineStageFlagBits2::eOpticalFlowNV )
};
};
@@ -6481,26 +6704,27 @@ namespace VULKAN_HPP_NAMESPACE
{
enum : VkFlags64
{
- allFlags = VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) | VkFlags64( AccessFlagBits2::eIndexRead ) |
- VkFlags64( AccessFlagBits2::eVertexAttributeRead ) | VkFlags64( AccessFlagBits2::eUniformRead ) |
- VkFlags64( AccessFlagBits2::eInputAttachmentRead ) | VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) |
- VkFlags64( AccessFlagBits2::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) |
- VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) | VkFlags64( AccessFlagBits2::eDepthStencilAttachmentWrite ) |
- VkFlags64( AccessFlagBits2::eTransferRead ) | VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) |
- VkFlags64( AccessFlagBits2::eHostWrite ) | VkFlags64( AccessFlagBits2::eMemoryRead ) | VkFlags64( AccessFlagBits2::eMemoryWrite ) |
- VkFlags64( AccessFlagBits2::eShaderSampledRead ) | VkFlags64( AccessFlagBits2::eShaderStorageRead ) |
- VkFlags64( AccessFlagBits2::eShaderStorageWrite )
+ allFlags =
+ VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) | VkFlags64( AccessFlagBits2::eIndexRead ) |
+ VkFlags64( AccessFlagBits2::eVertexAttributeRead ) | VkFlags64( AccessFlagBits2::eUniformRead ) | VkFlags64( AccessFlagBits2::eInputAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) | VkFlags64( AccessFlagBits2::eColorAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) | VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2::eTransferRead ) |
+ VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) | VkFlags64( AccessFlagBits2::eHostWrite ) |
+ VkFlags64( AccessFlagBits2::eMemoryRead ) | VkFlags64( AccessFlagBits2::eMemoryWrite ) | VkFlags64( AccessFlagBits2::eShaderSampledRead ) |
+ VkFlags64( AccessFlagBits2::eShaderStorageRead ) | VkFlags64( AccessFlagBits2::eShaderStorageWrite )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags64( AccessFlagBits2::eVideoDecodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoDecodeWriteKHR ) |
- VkFlags64( AccessFlagBits2::eVideoEncodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoEncodeWriteKHR )
+ | VkFlags64( AccessFlagBits2::eVideoDecodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoDecodeWriteKHR ) |
+ VkFlags64( AccessFlagBits2::eVideoEncodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoEncodeWriteKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) | VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) |
- VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) | VkFlags64( AccessFlagBits2::eConditionalRenderingReadEXT ) |
- VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) | VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) |
- VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) | VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) |
- VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) | VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) |
- VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) | VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI ) |
- VkFlags64( AccessFlagBits2::eShaderBindingTableReadKHR )
+ | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) | VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) |
+ VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) | VkFlags64( AccessFlagBits2::eConditionalRenderingReadEXT ) |
+ VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) | VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) |
+ VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) | VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) |
+ VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) | VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) |
+ VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) | VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI ) |
+ VkFlags64( AccessFlagBits2::eShaderBindingTableReadKHR ) | VkFlags64( AccessFlagBits2::eMicromapReadEXT ) |
+ VkFlags64( AccessFlagBits2::eMicromapWriteEXT ) | VkFlags64( AccessFlagBits2::eOpticalFlowReadNV ) | VkFlags64( AccessFlagBits2::eOpticalFlowWriteNV )
};
};
@@ -6626,7 +6850,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
| VkFlags64( FormatFeatureFlagBits2::eLinearColorAttachmentNV ) | VkFlags64( FormatFeatureFlagBits2::eWeightImageQCOM ) |
VkFlags64( FormatFeatureFlagBits2::eWeightSampledImageQCOM ) | VkFlags64( FormatFeatureFlagBits2::eBlockMatchingQCOM ) |
- VkFlags64( FormatFeatureFlagBits2::eBoxFilterSampledQCOM )
+ VkFlags64( FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) | VkFlags64( FormatFeatureFlagBits2::eOpticalFlowImageNV ) |
+ VkFlags64( FormatFeatureFlagBits2::eOpticalFlowVectorNV ) | VkFlags64( FormatFeatureFlagBits2::eOpticalFlowCostNV )
};
};
@@ -7837,7 +8062,7 @@ namespace VULKAN_HPP_NAMESPACE
enum : VkFlags
{
allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) |
- VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
+ VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) | VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding )
};
};
@@ -7915,7 +8140,8 @@ namespace VULKAN_HPP_NAMESPACE
enum : VkFlags
{
allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) |
- VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque )
+ VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) |
+ VkFlags( GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) | VkFlags( GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT )
};
};
@@ -7953,7 +8179,10 @@ namespace VULKAN_HPP_NAMESPACE
{
allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) |
VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV )
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT )
};
};
@@ -8488,6 +8717,42 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ //=== VK_EXT_device_address_binding_report ===
+
+ using DeviceAddressBindingFlagsEXT = Flags<DeviceAddressBindingFlagBitsEXT>;
+
+ template <>
+ struct FlagTraits<DeviceAddressBindingFlagBitsEXT>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( DeviceAddressBindingFlagBitsEXT::eInternalObject )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceAddressBindingFlagsEXT operator|( DeviceAddressBindingFlagBitsEXT bit0,
+ DeviceAddressBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceAddressBindingFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceAddressBindingFlagsEXT operator&( DeviceAddressBindingFlagBitsEXT bit0,
+ DeviceAddressBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceAddressBindingFlagsEXT( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceAddressBindingFlagsEXT operator^( DeviceAddressBindingFlagBitsEXT bit0,
+ DeviceAddressBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return DeviceAddressBindingFlagsEXT( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceAddressBindingFlagsEXT operator~( DeviceAddressBindingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( DeviceAddressBindingFlagsEXT( bits ) );
+ }
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -8538,5 +8803,214 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_EXT_opacity_micromap ===
+
+ using BuildMicromapFlagsEXT = Flags<BuildMicromapFlagBitsEXT>;
+
+ template <>
+ struct FlagTraits<BuildMicromapFlagBitsEXT>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( BuildMicromapFlagBitsEXT::ePreferFastTrace ) | VkFlags( BuildMicromapFlagBitsEXT::ePreferFastBuild ) |
+ VkFlags( BuildMicromapFlagBitsEXT::eAllowCompaction )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildMicromapFlagsEXT operator|( BuildMicromapFlagBitsEXT bit0, BuildMicromapFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return BuildMicromapFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildMicromapFlagsEXT operator&( BuildMicromapFlagBitsEXT bit0, BuildMicromapFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return BuildMicromapFlagsEXT( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildMicromapFlagsEXT operator^( BuildMicromapFlagBitsEXT bit0, BuildMicromapFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return BuildMicromapFlagsEXT( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildMicromapFlagsEXT operator~( BuildMicromapFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( BuildMicromapFlagsEXT( bits ) );
+ }
+
+ using MicromapCreateFlagsEXT = Flags<MicromapCreateFlagBitsEXT>;
+
+ template <>
+ struct FlagTraits<MicromapCreateFlagBitsEXT>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MicromapCreateFlagsEXT operator|( MicromapCreateFlagBitsEXT bit0, MicromapCreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return MicromapCreateFlagsEXT( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MicromapCreateFlagsEXT operator&( MicromapCreateFlagBitsEXT bit0, MicromapCreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return MicromapCreateFlagsEXT( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MicromapCreateFlagsEXT operator^( MicromapCreateFlagBitsEXT bit0, MicromapCreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return MicromapCreateFlagsEXT( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MicromapCreateFlagsEXT operator~( MicromapCreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( MicromapCreateFlagsEXT( bits ) );
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ using OpticalFlowUsageFlagsNV = Flags<OpticalFlowUsageFlagBitsNV>;
+
+ template <>
+ struct FlagTraits<OpticalFlowUsageFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowUsageFlagBitsNV::eUnknown ) | VkFlags( OpticalFlowUsageFlagBitsNV::eInput ) |
+ VkFlags( OpticalFlowUsageFlagBitsNV::eOutput ) | VkFlags( OpticalFlowUsageFlagBitsNV::eHint ) | VkFlags( OpticalFlowUsageFlagBitsNV::eCost ) |
+ VkFlags( OpticalFlowUsageFlagBitsNV::eGlobalFlow )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowUsageFlagsNV operator|( OpticalFlowUsageFlagBitsNV bit0,
+ OpticalFlowUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowUsageFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowUsageFlagsNV operator&( OpticalFlowUsageFlagBitsNV bit0,
+ OpticalFlowUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowUsageFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowUsageFlagsNV operator^( OpticalFlowUsageFlagBitsNV bit0,
+ OpticalFlowUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowUsageFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowUsageFlagsNV operator~( OpticalFlowUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( OpticalFlowUsageFlagsNV( bits ) );
+ }
+
+ using OpticalFlowGridSizeFlagsNV = Flags<OpticalFlowGridSizeFlagBitsNV>;
+
+ template <>
+ struct FlagTraits<OpticalFlowGridSizeFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowGridSizeFlagBitsNV::eUnknown ) | VkFlags( OpticalFlowGridSizeFlagBitsNV::e1X1 ) |
+ VkFlags( OpticalFlowGridSizeFlagBitsNV::e2X2 ) | VkFlags( OpticalFlowGridSizeFlagBitsNV::e4X4 ) |
+ VkFlags( OpticalFlowGridSizeFlagBitsNV::e8X8 )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowGridSizeFlagsNV operator|( OpticalFlowGridSizeFlagBitsNV bit0,
+ OpticalFlowGridSizeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowGridSizeFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowGridSizeFlagsNV operator&( OpticalFlowGridSizeFlagBitsNV bit0,
+ OpticalFlowGridSizeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowGridSizeFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowGridSizeFlagsNV operator^( OpticalFlowGridSizeFlagBitsNV bit0,
+ OpticalFlowGridSizeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowGridSizeFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowGridSizeFlagsNV operator~( OpticalFlowGridSizeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( OpticalFlowGridSizeFlagsNV( bits ) );
+ }
+
+ using OpticalFlowSessionCreateFlagsNV = Flags<OpticalFlowSessionCreateFlagBitsNV>;
+
+ template <>
+ struct FlagTraits<OpticalFlowSessionCreateFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) | VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) |
+ VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) | VkFlags( OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) |
+ VkFlags( OpticalFlowSessionCreateFlagBitsNV::eBothDirections )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateFlagsNV operator|( OpticalFlowSessionCreateFlagBitsNV bit0,
+ OpticalFlowSessionCreateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowSessionCreateFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateFlagsNV operator&( OpticalFlowSessionCreateFlagBitsNV bit0,
+ OpticalFlowSessionCreateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowSessionCreateFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateFlagsNV operator^( OpticalFlowSessionCreateFlagBitsNV bit0,
+ OpticalFlowSessionCreateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowSessionCreateFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateFlagsNV operator~( OpticalFlowSessionCreateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( OpticalFlowSessionCreateFlagsNV( bits ) );
+ }
+
+ using OpticalFlowExecuteFlagsNV = Flags<OpticalFlowExecuteFlagBitsNV>;
+
+ template <>
+ struct FlagTraits<OpticalFlowExecuteFlagBitsNV>
+ {
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints )
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowExecuteFlagsNV operator|( OpticalFlowExecuteFlagBitsNV bit0,
+ OpticalFlowExecuteFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowExecuteFlagsNV( bit0 ) | bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowExecuteFlagsNV operator&( OpticalFlowExecuteFlagBitsNV bit0,
+ OpticalFlowExecuteFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowExecuteFlagsNV( bit0 ) & bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowExecuteFlagsNV operator^( OpticalFlowExecuteFlagBitsNV bit0,
+ OpticalFlowExecuteFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return OpticalFlowExecuteFlagsNV( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR OpticalFlowExecuteFlagsNV operator~( OpticalFlowExecuteFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( OpticalFlowExecuteFlagsNV( bits ) );
+ }
+
} // namespace VULKAN_HPP_NAMESPACE
#endif
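As with the pre-existing flag types, the operators defined above let the new bit enums be combined and tested with the usual bitwise syntax; for example (using the default vk namespace):

    #include <vulkan/vulkan.hpp>

    // Compose session-creation and micromap-build flags at compile time.
    constexpr vk::OpticalFlowSessionCreateFlagsNV sessionFlags =
        vk::OpticalFlowSessionCreateFlagBitsNV::eEnableHint | vk::OpticalFlowSessionCreateFlagBitsNV::eEnableCost;

    constexpr vk::BuildMicromapFlagsEXT buildFlags =
        vk::BuildMicromapFlagBitsEXT::ePreferFastTrace | vk::BuildMicromapFlagBitsEXT::eAllowCompaction;

    // Flags convert explicitly to bool, so individual bits can be tested.
    constexpr bool wantsHint = static_cast<bool>( sessionFlags & vk::OpticalFlowSessionCreateFlagBitsNV::eEnableHint );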
diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp
index ac4250a..82bafce 100644
--- a/include/vulkan/vulkan_format_traits.hpp
+++ b/include/vulkan/vulkan_format_traits.hpp
@@ -267,6 +267,7 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
@@ -523,6 +524,7 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
@@ -2236,6 +2238,13 @@ namespace VULKAN_HPP_NAMESPACE
case 3: return 4;
default: VULKAN_HPP_ASSERT( false ); return 0;
}
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
default: return 0;
}
@@ -2492,6 +2501,7 @@ namespace VULKAN_HPP_NAMESPACE
case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
default: return 0;
}
@@ -4505,6 +4515,13 @@ namespace VULKAN_HPP_NAMESPACE
case 3: return "A";
default: VULKAN_HPP_ASSERT( false ); return "";
}
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
default: return "";
}
@@ -6518,6 +6535,13 @@ namespace VULKAN_HPP_NAMESPACE
case 3: return "SRGB";
default: VULKAN_HPP_ASSERT( false ); return "";
}
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
default: return "";
}
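The trait helpers defined in this header (blockSize, componentCount, componentBits, componentName, componentNumericFormat and friends) now also describe the packed 16-bit S10.5 flow-vector format. A small sketch, assuming C++14 or newer so the helpers are usable in constant expressions:

    #include <vulkan/vulkan_format_traits.hpp>

    constexpr auto flowFormat = vk::Format::eR16G16S105NV;
    static_assert( vk::blockSize( flowFormat ) == 4, "4 bytes per texel block" );
    static_assert( vk::componentCount( flowFormat ) == 2, "R and G components" );
    static_assert( vk::componentBits( flowFormat, 0 ) == 16, "16 bits per component" );
    // vk::componentName( flowFormat, 1 ) yields "G"; vk::componentNumericFormat( flowFormat, 0 ) yields "SINT".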
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
index 1225e27..650fbc3 100644
--- a/include/vulkan/vulkan_funcs.hpp
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -18425,6 +18425,39 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_device_fault ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
+ m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ Device::getFaultInfoEXT( Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+ VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+ VkResult result =
+ d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
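These wrappers are the vulkan.hpp counterpart of vkGetDeviceFaultInfoEXT and are intended to be called after a submission has returned eErrorDeviceLost. A sketch of the enhanced-mode path (hypothetical helper name; VK_EXT_device_fault and its feature are assumed to be enabled, a dispatcher with vkGetDeviceFaultInfoEXT loaded is assumed, and the field names follow the C structs VkDeviceFaultCountsEXT / VkDeviceFaultInfoEXT):

    #include <iostream>
    #include <string>
    #include <vulkan/vulkan.hpp>

    void reportDeviceFault( vk::Device device )
    {
        // Returns ResultValue<std::pair<DeviceFaultCountsEXT, DeviceFaultInfoEXT>>;
        // the wrapper accepts both eSuccess and eIncomplete.
        auto faultData = device.getFaultInfoEXT();
        vk::DeviceFaultCountsEXT const & counts      = faultData.value.first;
        std::string                      description = faultData.value.second.description;

        std::cout << "device fault: " << description << " (" << counts.addressInfoCount
                  << " address record(s), " << counts.vendorInfoCount << " vendor record(s))\n";
        // Retrieving the individual records takes a further call with arrays sized from these counts.
    }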
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -19666,6 +19699,460 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+ Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+ VkResult result =
+ d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+ Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+ VkResult result =
+ d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyMicromapEXT( m_device,
+ static_cast<VkMicromapEXT>( micromap ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyMicromapEXT( m_device,
+ static_cast<VkMicromapEXT>( micromap ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkBuildMicromapsEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkBuildMicromapsEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>(
+ d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result =
+ d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyMicromapToMemoryEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyMemoryToMicromapEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
+ m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+
+ template <typename DataType, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+ Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ DataType data;
+ VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+ micromapCount,
+ reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+ d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
+ return compatibility;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetMicromapBuildSizesEXT( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+ d.vkGetMicromapBuildSizesEXT( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+
+ return sizeInfo;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
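    The VK_EXT_opacity_micromap wrappers above follow the same query-sizes / create / build /
    destroy pattern as acceleration structures. A rough sketch of the call order, assuming a
    vk::Device device, a recording vk::CommandBuffer cmd, and an already filled
    vk::MicromapBuildInfoEXT buildInfo; buffer and scratch allocation and synchronization are
    deliberately omitted, and every name here is illustrative:

      vk::MicromapBuildSizesInfoEXT sizes =
          device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );

      vk::MicromapCreateInfoEXT createInfo{};                // createInfo.buffer must reference storage of
      createInfo.size = sizes.micromapSize;                  // at least sizes.micromapSize bytes
      createInfo.type = vk::MicromapTypeEXT::eOpacityMicromap;
      vk::MicromapEXT micromap = device.createMicromapEXT( createInfo );   // throws on failure in the default config

      buildInfo.dstMicromap = micromap;
      cmd.buildMicromapsEXT( buildInfo );                    // ArrayProxy overload accepts a single element

      // ... submit, wait, use the micromap, then:
      device.destroyMicromapEXT( micromap );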
//=== VK_EXT_pageable_device_local_memory ===
template <typename Dispatch>
@@ -19876,6 +20363,354 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendEquationEXT(
+ m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendEquationEXT(
+ m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorWriteMaskEXT(
+ m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendAdvancedEXT(
+ m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendAdvancedEXT(
+ m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetViewportSwizzleNV(
+ m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+ }
+
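    Each extended-dynamic-state-3 setter above is a thin pass-through to the matching vkCmdSet*
    entry point. A short sketch of a few of them on a recording vk::CommandBuffer cmd, assuming
    the corresponding extendedDynamicState3* features were enabled at device creation
    (illustrative only):

      cmd.setPolygonModeEXT( vk::PolygonMode::eFill );
      cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e4 );
      vk::SampleMask sampleMask = 0xF;                                    // one mask word covers 4 samples
      cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e4, sampleMask );    // ArrayProxy overload, single element
      vk::Bool32 blendEnable = VK_TRUE;
      cmd.setColorBlendEnableEXT( 0, blendEnable );                       // attachment 0, blending on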
//=== VK_EXT_shader_module_identifier ===
template <typename Dispatch>
@@ -19927,6 +20762,261 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
+ pFormatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+ }
+
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
+ opticalFlowImageFormatPropertiesNVAllocator );
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type
+ Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+ VkResult result = d.vkCreateOpticalFlowSessionNV(
+ m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), session );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type
+ Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+ VkResult result = d.vkCreateOpticalFlowSessionNV(
+ m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyOpticalFlowSessionNV(
+ m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyOpticalFlowSessionNV(
+ m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdOpticalFlowExecuteNV(
+ m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdOpticalFlowExecuteNV(
+ m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
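    The optical-flow wrappers above mirror the C API: create a session, bind the input, reference
    and flow-vector image views to its binding points, then record vkCmdOpticalFlowExecuteNV. A
    condensed sketch, assuming a vk::Device device, a recording vk::CommandBuffer cmd, image views
    inputView / refView / flowView already transitioned to a layout valid for optical-flow use
    (eGeneral is used here as an assumption), and a filled vk::OpticalFlowSessionCreateInfoNV
    sessionCreateInfo; names are illustrative and error handling is elided:

      vk::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( sessionCreateInfo );

      device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eInput,
                                            inputView, vk::ImageLayout::eGeneral );
      device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eReference,
                                            refView, vk::ImageLayout::eGeneral );
      device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eFlowVector,
                                            flowView, vk::ImageLayout::eGeneral );

      vk::OpticalFlowExecuteInfoNV executeInfo{};            // flags and regions left at defaults for this sketch
      cmd.opticalFlowExecuteNV( session, executeInfo );

      // once the submitted work has completed:
      device.destroyOpticalFlowSessionNV( session );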
//=== VK_QCOM_tile_properties ===
template <typename Dispatch>
diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp
index 52b72d8..c0bab2c 100644
--- a/include/vulkan/vulkan_handles.hpp
+++ b/include/vulkan/vulkan_handles.hpp
@@ -1176,6 +1176,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_pipeline_library ===
struct PipelineLibraryCreateInfoKHR;
+ //=== VK_NV_present_barrier ===
+ struct PhysicalDevicePresentBarrierFeaturesNV;
+ struct SurfaceCapabilitiesPresentBarrierNV;
+ struct SwapchainPresentBarrierCreateInfoNV;
+
//=== VK_KHR_present_id ===
struct PresentIdKHR;
struct PhysicalDevicePresentIdFeaturesKHR;
@@ -1275,6 +1280,14 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_4444_formats ===
struct PhysicalDevice4444FormatsFeaturesEXT;
+ //=== VK_EXT_device_fault ===
+ struct PhysicalDeviceFaultFeaturesEXT;
+ struct DeviceFaultCountsEXT;
+ struct DeviceFaultInfoEXT;
+ struct DeviceFaultAddressInfoEXT;
+ struct DeviceFaultVendorInfoEXT;
+ struct DeviceFaultVendorBinaryHeaderVersionOneEXT;
+
//=== VK_EXT_rgba10x6_formats ===
struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
@@ -1303,6 +1316,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_physical_device_drm ===
struct PhysicalDeviceDrmPropertiesEXT;
+ //=== VK_EXT_device_address_binding_report ===
+ struct PhysicalDeviceAddressBindingReportFeaturesEXT;
+ struct DeviceAddressBindingCallbackDataEXT;
+
//=== VK_EXT_depth_clip_control ===
struct PhysicalDeviceDepthClipControlFeaturesEXT;
struct PipelineViewportDepthClipControlCreateInfoEXT;
@@ -1390,6 +1407,20 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_2d_view_of_3d ===
struct PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+ //=== VK_EXT_opacity_micromap ===
+ struct MicromapBuildInfoEXT;
+ struct MicromapUsageEXT;
+ struct MicromapCreateInfoEXT;
+ struct PhysicalDeviceOpacityMicromapFeaturesEXT;
+ struct PhysicalDeviceOpacityMicromapPropertiesEXT;
+ struct MicromapVersionInfoEXT;
+ struct CopyMicromapToMemoryInfoEXT;
+ struct CopyMemoryToMicromapInfoEXT;
+ struct CopyMicromapInfoEXT;
+ struct MicromapBuildSizesInfoEXT;
+ struct AccelerationStructureTrianglesOpacityMicromapEXT;
+ struct MicromapTriangleEXT;
+
//=== VK_EXT_border_color_swizzle ===
struct PhysicalDeviceBorderColorSwizzleFeaturesEXT;
struct SamplerBorderColorComponentMappingCreateInfoEXT;
@@ -1424,6 +1455,12 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceImageProcessingFeaturesQCOM;
struct PhysicalDeviceImageProcessingPropertiesQCOM;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ struct PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ struct PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ struct ColorBlendEquationEXT;
+ struct ColorBlendAdvancedEXT;
+
//=== VK_EXT_subpass_merge_feedback ===
struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
struct RenderPassCreationControlEXT;
@@ -1442,9 +1479,21 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+ //=== VK_NV_optical_flow ===
+ struct PhysicalDeviceOpticalFlowFeaturesNV;
+ struct PhysicalDeviceOpticalFlowPropertiesNV;
+ struct OpticalFlowImageFormatInfoNV;
+ struct OpticalFlowImageFormatPropertiesNV;
+ struct OpticalFlowSessionCreateInfoNV;
+ struct OpticalFlowSessionCreatePrivateDataInfoNV;
+ struct OpticalFlowExecuteInfoNV;
+
//=== VK_EXT_legacy_dithering ===
struct PhysicalDeviceLegacyDitheringFeaturesEXT;
+ //=== VK_EXT_pipeline_protected_access ===
+ struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
//=== VK_QCOM_tile_properties ===
struct PhysicalDeviceTilePropertiesFeaturesQCOM;
struct TilePropertiesQCOM;
@@ -2825,6 +2874,88 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
+ class OpticalFlowSessionNV
+ {
+ public:
+ using CType = VkOpticalFlowSessionNV;
+ using NativeType = VkOpticalFlowSessionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV() = default;
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT
+ : m_opticalFlowSessionNV( opticalFlowSessionNV )
+ {
+ }
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ OpticalFlowSessionNV & operator=( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT
+ {
+ m_opticalFlowSessionNV = opticalFlowSessionNV;
+ return *this;
+ }
+#endif
+
+ OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_opticalFlowSessionNV = {};
+ return *this;
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV == rhs.m_opticalFlowSessionNV;
+ }
+
+ bool operator!=( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV != rhs.m_opticalFlowSessionNV;
+ }
+
+ bool operator<( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV < rhs.m_opticalFlowSessionNV;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkOpticalFlowSessionNV m_opticalFlowSessionNV = {};
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
class DescriptorUpdateTemplate
{
public:
@@ -3087,6 +3218,85 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
+ class MicromapEXT
+ {
+ public:
+ using CType = VkMicromapEXT;
+ using NativeType = VkMicromapEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR MicromapEXT() = default;
+ VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( micromapEXT ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ MicromapEXT & operator=( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT
+ {
+ m_micromapEXT = micromapEXT;
+ return *this;
+ }
+#endif
+
+ MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_micromapEXT = {};
+ return *this;
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapEXT const & ) const = default;
+#else
+ bool operator==( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT == rhs.m_micromapEXT;
+ }
+
+ bool operator!=( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT != rhs.m_micromapEXT;
+ }
+
+ bool operator<( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT < rhs.m_micromapEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkMicromapEXT m_micromapEXT = {};
+ };
+
+ template <>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::MicromapEXT;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::MicromapEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
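A compile-time sketch with no assumptions beyond the header and the default vk namespace: the trait specializations above let generic code map ObjectType::eMicromapEXT back to the C++ handle type and detect that MicromapEXT is a Vulkan handle.

#include <type_traits>
#include <vulkan/vulkan.hpp>

static_assert( std::is_same<vk::CppType<vk::ObjectType, vk::ObjectType::eMicromapEXT>::Type, vk::MicromapEXT>::value,
               "CppType maps the ObjectType enumerant to the handle class" );
static_assert( vk::isVulkanHandleType<vk::MicromapEXT>::value, "MicromapEXT is recognized as a handle type" );
static_assert( vk::MicromapEXT::objectType == vk::ObjectType::eMicromapEXT, "the handle reports its own ObjectType" );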
class CommandBuffer
{
public:
@@ -4903,6 +5113,252 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildMicromapsEXT( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
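A hedged sketch of recording the command-buffer micromap entry points declared above; the command buffer, build info, micromap and query pool are assumed to have been created elsewhere with VK_EXT_opacity_micromap enabled, and the synchronization normally required between the build and the property write is omitted.

#include <vulkan/vulkan.hpp>

void recordMicromapWork( vk::CommandBuffer                commandBuffer,
                         vk::MicromapBuildInfoEXT const & buildInfo,
                         vk::MicromapEXT                  micromap,
                         vk::QueryPool                    queryPool )
{
  // enhanced-mode ArrayProxy overload: a single build info, no explicit count
  commandBuffer.buildMicromapsEXT( buildInfo );

  // query the compacted size of the built micromap into query slot 0
  commandBuffer.writeMicromapsPropertiesEXT( micromap, vk::QueryType::eMicromapCompactedSizeEXT, queryPool, 0 );
}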
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageToColorLocationNV( uint32_t coverageToColorLocation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
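A minimal sketch of a few of the new VK_EXT_extended_dynamic_state3 setters; it assumes `cb` is in the recording state, the corresponding extendedDynamicState3* features are enabled, and the bound pipeline lists the matching dynamic states.

#include <array>
#include <vulkan/vulkan.hpp>

void setSomeDynamicState3( vk::CommandBuffer cb )
{
  cb.setPolygonModeEXT( vk::PolygonMode::eLine );
  cb.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e4 );
  cb.setAlphaToCoverageEnableEXT( VK_FALSE );

  // per-attachment state uses the firstAttachment + ArrayProxy form
  std::array<vk::Bool32, 2> blendEnables = { VK_TRUE, VK_FALSE };
  cb.setColorBlendEnableEXT( 0, blendEnables );

  std::array<vk::ColorComponentFlags, 2> writeMasks = {
    vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA,
    vk::ColorComponentFlags( vk::ColorComponentFlagBits::eR )
  };
  cb.setColorWriteMaskEXT( 0, writeMasks );
}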
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
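A hedged sketch of recording optical-flow execution with the declaration above; `session` is a previously created vk::OpticalFlowSessionNV whose input, reference and output images are already bound, and VK_NV_optical_flow is assumed to be enabled on the device.

#include <vulkan/vulkan.hpp>

void recordOpticalFlow( vk::CommandBuffer cb, vk::OpticalFlowSessionNV session )
{
  vk::OpticalFlowExecuteInfoNV executeInfo{};   // zero regions: process the whole frame
  cb.opticalFlowExecuteNV( session, executeInfo );
}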
operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
@@ -7023,6 +7479,20 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch>
+ class UniqueHandleTraits<MicromapEXT, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueMicromapEXT = UniqueHandle<MicromapEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch>
+ class UniqueHandleTraits<OpticalFlowSessionNV, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueOpticalFlowSessionNV = UniqueHandle<OpticalFlowSessionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch>
class UniqueHandleTraits<Pipeline, Dispatch>
{
public:
@@ -10930,6 +11400,18 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_device_fault ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ getFaultInfoEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
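A hedged sketch of the enhanced-mode VK_EXT_device_fault query declared above; it assumes `device` has the deviceFault feature enabled and has just reported VK_ERROR_DEVICE_LOST. The call returns a ResultValue rather than throwing on eIncomplete, since the implementation may legitimately answer with that code.

#include <iostream>
#include <vulkan/vulkan.hpp>

void dumpDeviceFault( vk::Device device )
{
  auto faultInfo = device.getFaultInfoEXT();   // ResultValue<std::pair<DeviceFaultCountsEXT, DeviceFaultInfoEXT>>
  vk::DeviceFaultCountsEXT const & counts = faultInfo.value.first;
  vk::DeviceFaultInfoEXT const &   info   = faultInfo.value.second;
  std::cout << "device fault: " << info.description.data()
            << " (result = " << vk::to_string( faultInfo.result )
            << ", addressInfoCount = " << counts.addressInfoCount << ")\n";
}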
//=== VK_KHR_ray_tracing_pipeline ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -11219,6 +11701,144 @@ namespace VULKAN_HPP_NAMESPACE
getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+ createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+ createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+ writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
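A hedged sketch of the host-side property query added above, assuming the micromapHostCommands feature and a host-built `micromap`; the single-value writeMicromapsPropertyEXT variant returns the queried value directly. The specific query type and 8-byte stride used here are illustrative assumptions.

#include <vulkan/vulkan.hpp>

vk::DeviceSize querySerializationSize( vk::Device device, vk::MicromapEXT micromap )
{
  return device.writeMicromapsPropertyEXT<vk::DeviceSize>(
    micromap, vk::QueryType::eMicromapSerializationSizeEXT, sizeof( vk::DeviceSize ) );
}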
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
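A sketch of device-level micromap setup using the declarations above, under the usual assumptions (VK_EXT_opacity_micromap enabled, exceptions and the default dispatcher in use); `buffer` is a hypothetical backing buffer created with the micromap-storage usage flag, and the usage and data fields of `buildInfo` are assumed to be filled in already.

#include <vulkan/vulkan.hpp>

vk::UniqueMicromapEXT makeOpacityMicromap( vk::Device device, vk::Buffer buffer, vk::MicromapBuildInfoEXT & buildInfo )
{
  // 1. ask how much storage and scratch memory the build needs
  vk::MicromapBuildSizesInfoEXT sizes =
    device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );

  // 2. create the micromap object on top of the backing buffer
  vk::MicromapCreateInfoEXT createInfo{};
  createInfo.buffer = buffer;
  createInfo.offset = 0;
  createInfo.size   = sizes.micromapSize;
  createInfo.type   = vk::MicromapTypeEXT::eOpacityMicromap;

  // the Unique variant destroys the handle via destroyMicromapEXT on scope exit
  vk::UniqueMicromapEXT micromap = device.createMicromapEXTUnique( createInfo );
  buildInfo.dstMicromap = micromap.get();
  return micromap;
}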
//=== VK_EXT_pageable_device_local_memory ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -11326,6 +11946,67 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type
+ createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type
+ createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
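A hedged sketch of the device-level VK_NV_optical_flow entry points above; `createInfo` (dimensions, formats, grid sizes, and so on) is assumed to have been filled from the formats reported by getOpticalFlowImageFormatsNV, and the image views are hypothetical views in eGeneral layout matching those formats.

#include <vulkan/vulkan.hpp>

vk::OpticalFlowSessionNV makeFlowSession( vk::Device                                 device,
                                          vk::OpticalFlowSessionCreateInfoNV const & createInfo,
                                          vk::ImageView                              inputView,
                                          vk::ImageView                              referenceView,
                                          vk::ImageView                              flowView )
{
  vk::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( createInfo );

  // each session image is bound individually before the session is executed
  device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eInput, inputView, vk::ImageLayout::eGeneral );
  device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eReference, referenceView, vk::ImageLayout::eGeneral );
  device.bindOpticalFlowSessionImageNV( session, vk::OpticalFlowSessionBindingPointNV::eFlowVector, flowView, vk::ImageLayout::eGeneral );
  return session;
}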
//=== VK_QCOM_tile_properties ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -11428,7 +12109,7 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( DisplayModeKHR const & ) const = default;
#else
- bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_displayModeKHR == rhs.m_displayModeKHR;
}
@@ -12785,6 +13466,31 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = OpticalFlowImageFormatPropertiesNVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
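A sketch of enumerating the image formats usable as optical-flow input with the physical-device query above; it assumes `physicalDevice` advertises VK_NV_optical_flow and that the default exception-throwing enhanced mode is in use.

#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::OpticalFlowImageFormatPropertiesNV> listInputFormats( vk::PhysicalDevice physicalDevice )
{
  vk::OpticalFlowImageFormatInfoNV formatInfo{};
  formatInfo.usage = vk::OpticalFlowUsageFlagBitsNV::eInput;
  // the enhanced overload performs the usual count/fill two-call pattern internally
  return physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );
}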
operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
{
return m_physicalDevice;
diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp
index 713958d..8e2d114 100644
--- a/include/vulkan/vulkan_hash.hpp
+++ b/include/vulkan/vulkan_hash.hpp
@@ -472,6 +472,28 @@ namespace std
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkMicromapEXT>{}( static_cast<VkMicromapEXT>( micromapEXT ) );
+ }
+ };
+
+ //=== VK_NV_optical_flow ===
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkOpticalFlowSessionNV>{}( static_cast<VkOpticalFlowSessionNV>( opticalFlowSessionNV ) );
+ }
+ };
+
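With the std::hash specializations above, the new handle types can key the unordered containers directly (operator== comes from the handle classes themselves); a minimal sketch with no further assumptions.

#include <unordered_map>
#include <unordered_set>
#include <vulkan/vulkan_hash.hpp>

std::unordered_set<vk::MicromapEXT>               liveMicromaps;
std::unordered_map<vk::OpticalFlowSessionNV, int> sessionFrameCounts;

void track( vk::MicromapEXT micromap, vk::OpticalFlowSessionNV session )
{
  liveMicromaps.insert( micromap );
  ++sessionFrameCounts[ session ];
}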
#if 14 <= VULKAN_HPP_CPP_VERSION
//======================================
//=== HASH structures for structures ===
@@ -783,6 +805,19 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & accelerationStructureVersionInfoKHR ) const VULKAN_HPP_NOEXCEPT
@@ -1951,6 +1986,37 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT
@@ -2354,6 +2420,21 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT
@@ -2946,6 +3027,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const & deviceAddressBindingCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
@@ -3114,6 +3211,92 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext );
+ for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion );
+ for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const VULKAN_HPP_NOEXCEPT
@@ -5890,6 +6073,65 @@ namespace std
# endif /*VK_USE_PLATFORM_METAL_EXT*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const VULKAN_HPP_NOEXCEPT
@@ -5983,6 +6225,84 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT
@@ -6207,6 +6527,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const & physicalDeviceAddressBindingReportFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>
{
std::size_t
@@ -6800,6 +7134,64 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & physicalDeviceExtendedDynamicStateFeaturesEXT ) const
@@ -6897,6 +7289,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const VULKAN_HPP_NOEXCEPT
@@ -8039,6 +8445,74 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>
{
std::size_t
@@ -8143,6 +8617,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const & physicalDevicePipelineProtectedAccessFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pipelineProtectedAccess );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT ) const
@@ -8234,6 +8722,20 @@ namespace std
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
@@ -11920,6 +12422,19 @@ namespace std
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT
@@ -12044,6 +12559,19 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT
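The hunks above give each struct new in 1.3.230 (the extended-dynamic-state-3, device-fault, opacity-micromap, optical-flow, pipeline-protected-access and present-barrier types) a std::hash specialization that folds sType, pNext and every member into one seed via VULKAN_HPP_HASH_COMBINE. A minimal sketch of what that enables, assuming the specializations are picked up through <vulkan/vulkan_hash.hpp>:

#include <vulkan/vulkan_hash.hpp>
#include <cstdio>

int main()
{
  // sType is pre-filled by the struct's default constructor, pNext defaults to nullptr.
  VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV info{};
  info.presentBarrierEnable = VK_TRUE;

  // Invokes the specialization added in this change.
  std::size_t h = std::hash<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>{}( info );
  std::printf( "hash = %zu\n", h );
  return 0;
}

Such hashes are convenient for caching or change detection on feature/property queries without hand-written per-struct comparison code.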
diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp
index 67d8765..80217dd 100644
--- a/include/vulkan/vulkan_raii.hpp
+++ b/include/vulkan/vulkan_raii.hpp
@@ -357,6 +357,10 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+ //=== VK_NV_optical_flow ===
+ vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
@@ -625,6 +629,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_capabilities ===
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+ //=== VK_NV_optical_flow ===
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
@@ -895,6 +902,9 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
//=== VK_EXT_discard_rectangles ===
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
@@ -955,6 +965,44 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdSetPrimitiveRestartEnable )
vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT =
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
//=== VK_EXT_external_memory_host ===
vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
@@ -998,6 +1046,22 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
//=== VK_EXT_pageable_device_local_memory ===
vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
@@ -1460,6 +1524,12 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+ //=== VK_NV_optical_flow ===
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
//=== VK_NV_ray_tracing ===
vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
@@ -1745,6 +1815,9 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+ //=== VK_EXT_device_fault ===
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
//=== VK_EXT_discard_rectangles ===
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
@@ -1775,6 +1848,39 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
//=== VK_EXT_external_memory_host ===
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
@@ -1820,6 +1926,22 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+ //=== VK_EXT_opacity_micromap ===
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
@@ -2181,6 +2303,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+ //=== VK_NV_optical_flow ===
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
//=== VK_NV_ray_tracing ===
PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
@@ -2299,6 +2427,12 @@ namespace VULKAN_HPP_NAMESPACE
class BufferCollectionFUCHSIA;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
+ //=== VK_EXT_opacity_micromap ===
+ class MicromapEXT;
+
+ //=== VK_NV_optical_flow ===
+ class OpticalFlowSessionNV;
+
//====================
//=== RAII HANDLES ===
//====================
@@ -3013,6 +3147,11 @@ namespace VULKAN_HPP_NAMESPACE
struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const;
+
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
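The new PhysicalDevice::getOpticalFlowImageFormatsNV member wraps the vkGetPhysicalDeviceOpticalFlowImageFormatsNV pointer loaded above, performing the count/query enumeration and returning the properties as a vector. A minimal sketch, assuming an already-constructed RAII physical device; the eInput usage flag is an illustrative value:

#include <vulkan/vulkan_raii.hpp>
#include <vector>

void listOpticalFlowInputFormats( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice )
{
  VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV formatInfo{};
  formatInfo.usage = VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV::eInput;

  // Each returned entry names a format acceptable for optical-flow input images.
  std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> formats =
    physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );

  for ( auto const & properties : formats )
  {
    (void)properties.format;   // e.g. feed into image creation for the session
  }
}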
@@ -3766,6 +3905,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_NODISCARD
+ std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ getFaultInfoEXT() const;
+
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesKHR(
@@ -3815,6 +3960,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const;
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
+ createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const;
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD std::vector<DataType>
+ writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const;
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT;
+
//=== VK_KHR_maintenance4 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
@@ -3844,6 +4027,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
+ createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
@@ -5375,6 +5564,101 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstInstance,
Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ //=== VK_EXT_opacity_micromap ===
+
+ void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT;
+
+ void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT;
+
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const VULKAN_HPP_NOEXCEPT;
+
+ void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT;
+
+ void
+ setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT;
+
+ void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT;
+
+ void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_NV_optical_flow ===
+
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
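The CommandBuffer declarations above mirror the VK_EXT_extended_dynamic_state3 entry points loaded into the device dispatcher: each state that used to be baked into the pipeline gets a set*EXT call, and the per-attachment ones take an ArrayProxy. A minimal recording sketch, assuming a RAII command buffer already in the recording state and a pipeline created with the matching dynamic states enabled:

#include <vulkan/vulkan_raii.hpp>

void recordExtendedDynamicState3( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer const & commandBuffer )
{
  // scalar states
  commandBuffer.setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode::eLine );
  commandBuffer.setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );
  commandBuffer.setDepthClampEnableEXT( VK_FALSE );
  commandBuffer.setAlphaToCoverageEnableEXT( VK_FALSE );

  // per-attachment states take an ArrayProxy; a single attachment 0 here
  VULKAN_HPP_NAMESPACE::Bool32 blendEnable = VK_FALSE;
  commandBuffer.setColorBlendEnableEXT( 0, blendEnable );
  commandBuffer.setColorWriteMaskEXT( 0,
                                      VULKAN_HPP_NAMESPACE::ColorComponentFlagBits::eR | VULKAN_HPP_NAMESPACE::ColorComponentFlagBits::eG |
                                        VULKAN_HPP_NAMESPACE::ColorComponentFlagBits::eB | VULKAN_HPP_NAMESPACE::ColorComponentFlagBits::eA );
}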
@@ -7633,6 +7917,237 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
+ class MicromapEXT
+ {
+ public:
+ using CType = VkMicromapEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateMicromapEXT( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkMicromapEXT *>( &m_micromap ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ throwResultException( result, "vkCreateMicromapEXT" );
+ }
+ }
+
+ MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkMicromapEXT micromap,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_micromap( micromap )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ MicromapEXT( std::nullptr_t ) {}
+
+ ~MicromapEXT()
+ {
+ clear();
+ }
+
+ MicromapEXT() = delete;
+ MicromapEXT( MicromapEXT const & ) = delete;
+ MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ MicromapEXT & operator=( MicromapEXT const & ) = delete;
+ MicromapEXT & operator =( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_micromap = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromap;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_micromap )
+ {
+ getDispatcher()->vkDestroyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_micromap = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_micromap, rhs.m_micromap );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+
+ class OpticalFlowSessionNV
+ {
+ public:
+ using CType = VkOpticalFlowSessionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &m_session ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ throwResultException( result, "vkCreateOpticalFlowSessionNV" );
+ }
+ }
+
+ OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkOpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_session( session )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ OpticalFlowSessionNV( std::nullptr_t ) {}
+
+ ~OpticalFlowSessionNV()
+ {
+ clear();
+ }
+
+ OpticalFlowSessionNV() = delete;
+ OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;
+ OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;
+ OpticalFlowSessionNV & operator =( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_session = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_session;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_session )
+ {
+ getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkOpticalFlowSessionNV>( m_session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_session = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_session, rhs.m_session );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout ) const;
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+
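The OpticalFlowSessionNV class above is the RAII counterpart of vkCreateOpticalFlowSessionNV / vkDestroyOpticalFlowSessionNV, with bindImage wrapping vkBindOpticalFlowSessionImageNV. A minimal end-to-end sketch, assuming a RAII device with VK_NV_optical_flow enabled, image views already backed by suitable images, and illustrative dimensions; the format and grid-size members of the create info are left at their defaults here and would normally be chosen from getOpticalFlowImageFormatsNV results:

#include <vulkan/vulkan_raii.hpp>

void runOpticalFlow( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer const & commandBuffer,
                     VULKAN_HPP_NAMESPACE::ImageView inputView,
                     VULKAN_HPP_NAMESPACE::ImageView referenceView,
                     VULKAN_HPP_NAMESPACE::ImageView flowVectorView )
{
  VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV createInfo{};
  createInfo.width  = 1920;   // illustrative; must match the bound images
  createInfo.height = 1080;

  // vkCreateOpticalFlowSessionNV; destroyed automatically when 'session' goes out of scope
  VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( createInfo );

  session.bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV::eInput, inputView, VULKAN_HPP_NAMESPACE::ImageLayout::eGeneral );
  session.bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV::eReference, referenceView, VULKAN_HPP_NAMESPACE::ImageLayout::eGeneral );
  session.bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV::eFlowVector, flowVectorView, VULKAN_HPP_NAMESPACE::ImageLayout::eGeneral );

  // Records vkCmdOpticalFlowExecuteNV; *session yields the plain VkOpticalFlowSessionNV-level handle.
  VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV executeInfo{};
  commandBuffer.opticalFlowExecuteNV( *session, executeInfo );
}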
class PerformanceConfigurationINTEL
{
public:
@@ -16781,6 +17296,26 @@ namespace VULKAN_HPP_NAMESPACE
return structureChain;
}
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ Device::getFaultInfoEXT() const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> needs extension <VK_EXT_device_fault> enabled!" );
+
+ std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+ VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+ VkResult result = getDispatcher()->vkGetDeviceFaultInfoEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+
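Device::getFaultInfoEXT above issues a single vkGetDeviceFaultInfoEXT call and returns the counts and info structs together with the Result (eSuccess or eIncomplete). A minimal sketch of using it from error handling after a device-lost report, assuming a RAII device with VK_EXT_device_fault enabled; only the textual description is read here, and the address/vendor arrays stay at their null defaults:

#include <vulkan/vulkan_raii.hpp>
#include <cstdio>

void reportDeviceFault( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device )
{
  // pair< Result, pair< DeviceFaultCountsEXT, DeviceFaultInfoEXT > >
  auto faultData = device.getFaultInfoEXT();

  VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & counts = faultData.second.first;
  VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const &   info   = faultData.second.second;

  std::printf( "device fault: %s (%u address records, %u vendor records reported)\n",
               info.description.data(),
               counts.addressInfoCount,
               counts.vendorInfoCount );
}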
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -17300,6 +17835,215 @@ namespace VULKAN_HPP_NAMESPACE
static_cast<const int32_t *>( vertexOffset ) );
}
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
+ Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdBuildMicromapsEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infos.size(),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT &&
+ "Function <vkCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT &&
+ "Function <vkCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
+ Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT &&
+ "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+ std::vector<DataType> data( dataSize / sizeof( DataType ) );
+ VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+
+ return data;
+ }
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
+ Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT &&
+ "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ DataType data;
+ VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+
+ return data;
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT &&
+ "Function <vkCmdCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT &&
+ "Function <vkCmdCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT &&
+ "Function <vkCmdWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT &&
+ "Function <vkGetDeviceMicromapCompatibilityEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+ getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
+ return compatibility;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT &&
+ "Function <vkGetMicromapBuildSizesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+ getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+
+ return sizeInfo;
+ }
+
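The implementations above complete the host and command-buffer paths for VK_EXT_opacity_micromap: size queries, the RAII MicromapEXT constructor reached via Device::createMicromapEXT, deferrable host builds and copies, and query write-back. A minimal sketch of the size-query step, assuming a RAII device with the extension enabled; the usage counts, buffers and scratch memory a real build needs are omitted:

#include <vulkan/vulkan_raii.hpp>

void queryOpacityMicromapSizes( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device )
{
  VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT buildInfo{};
  buildInfo.type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;

  // Wraps vkGetMicromapBuildSizesEXT: reports how large the micromap buffer and
  // the build scratch memory must be for this build description.
  VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizes =
    device.getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );

  // A full build would allocate a buffer of sizes.micromapSize, construct the handle with
  // device.createMicromapEXT( createInfo ), point buildInfo.dstMicromap at it, and record
  // commandBuffer.buildMicromapsEXT( buildInfo ) with sizes.buildScratchSize of scratch memory.
  (void)sizes;
}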
//=== VK_EXT_pageable_device_local_memory ===
VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT
@@ -17423,6 +18167,303 @@ namespace VULKAN_HPP_NAMESPACE
return pData;
}
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
+ "Function <vkCmdSetTessellationDomainOriginEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
+ "Function <vkCmdSetDepthClampEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
+ "Function <vkCmdSetPolygonModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
+ "Function <vkCmdSetRasterizationSamplesEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
+ "Function <vkCmdSetSampleMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
+ "Function <vkCmdSetAlphaToCoverageEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
+ "Function <vkCmdSetAlphaToOneEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
+ "Function <vkCmdSetLogicOpEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
+ uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
+ "Function <vkCmdSetColorBlendEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendEnables.size(),
+ reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
+ "Function <vkCmdSetColorBlendEquationEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendEquations.size(),
+ reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
+ "Function <vkCmdSetColorWriteMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorWriteMasks.size(),
+ reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
+ "Function <vkCmdSetRasterizationStreamEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT(
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
+ "Function <vkCmdSetConservativeRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
+ "Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
+ "Function <vkCmdSetDepthClipEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
+ "Function <vkCmdSetSampleLocationsEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
+ "Function <vkCmdSetColorBlendAdvancedEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendAdvanced.size(),
+ reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
+ "Function <vkCmdSetProvokingVertexModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
+ "Function <vkCmdSetLineRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
+ "Function <vkCmdSetLineStippleEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
+ "Function <vkCmdSetDepthClipNegativeOneToOneEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
+ "Function <vkCmdSetViewportWScalingEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV(
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
+ "Function <vkCmdSetViewportSwizzleNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstViewport,
+ viewportSwizzles.size(),
+ reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
+ "Function <vkCmdSetCoverageToColorEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
+ "Function <vkCmdSetCoverageToColorLocationNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
+ "Function <vkCmdSetCoverageModulationModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
+ "Function <vkCmdSetCoverageModulationTableEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( coverageModulationTableEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
+ "Function <vkCmdSetCoverageModulationTableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationTableNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
+ "Function <vkCmdSetShadingRateImageEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
+ "Function <vkCmdSetRepresentativeFragmentTestEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( representativeFragmentTestEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
+ "Function <vkCmdSetCoverageReductionModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+ }
+
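A minimal sketch of recording a few of these dynamic states, assuming a recording vk::raii::CommandBuffer named cmd (hypothetical), a bound pipeline that declared the matching VK_DYNAMIC_STATE_*_EXT states, and VK_EXT_extended_dynamic_state3 enabled:

  cmd.setPolygonModeEXT( vk::PolygonMode::eLine );
  cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e4 );

  vk::SampleMask sampleMask = 0xFFFFFFFFu;         // one 32-bit word covers 4 samples
  cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e4, sampleMask );

  vk::Bool32 blendEnable = VK_TRUE;
  cmd.setColorBlendEnableEXT( 0, blendEnable );    // attachment 0 only

  vk::ColorBlendEquationEXT equation( vk::BlendFactor::eSrcAlpha,
                                      vk::BlendFactor::eOneMinusSrcAlpha,
                                      vk::BlendOp::eAdd,
                                      vk::BlendFactor::eOne,
                                      vk::BlendFactor::eZero,
                                      vk::BlendOp::eAdd );
  cmd.setColorBlendEquationEXT( 0, equation );
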
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT
@@ -17451,6 +18492,75 @@ namespace VULKAN_HPP_NAMESPACE
return identifier;
}
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
+ "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return imageFormatProperties;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
+ Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV &&
+ "Function <vkBindOpticalFlowSessionImageNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ VkResult result = getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkOpticalFlowSessionNV>( m_session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+ }
+
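A minimal sketch of the query-and-create half of this path, assuming a vk::raii::PhysicalDevice named physicalDevice and a vk::raii::Device named device (both hypothetical) with VK_NV_optical_flow enabled; the field values are illustrative placeholders rather than tuned settings:

  vk::OpticalFlowImageFormatInfoNV formatInfo;
  formatInfo.usage = vk::OpticalFlowUsageFlagBitsNV::eInput;
  std::vector<vk::OpticalFlowImageFormatPropertiesNV> inputFormats =
    physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );

  vk::OpticalFlowSessionCreateInfoNV sessionInfo;
  sessionInfo.width  = 1920;
  sessionInfo.height = 1080;
  vk::raii::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( sessionInfo );
  // session.bindImage( ... ) and cmd.opticalFlowExecuteNV( ... ) would follow, using the wrappers above.
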
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp
index dbfaf5c..013e2c2 100644
--- a/include/vulkan/vulkan_static_assertions.hpp
+++ b/include/vulkan/vulkan_static_assertions.hpp
@@ -4685,6 +4685,29 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Pipeline
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value,
"PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+//=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+ "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+ "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+ "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
//=== VK_KHR_present_id ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
@@ -5104,6 +5127,43 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
"PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+//=== VK_EXT_device_fault ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value,
+ "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value,
+ "DeviceFaultCountsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value,
+ "DeviceFaultInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value,
+ "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value,
+ "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+ "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" );
+
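These checks are what make the zero-copy casts used throughout the headers legal. A minimal illustration of the pattern they protect, assuming the vk namespace alias and the VK_EXT_device_fault wrappers added in this change:

  vk::DeviceFaultCountsEXT faultCounts;
  // Identical size plus standard layout means the C++ wrapper can be handed straight to the C entry point.
  VkDeviceFaultCountsEXT * native = reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts );
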
//=== VK_EXT_rgba10x6_formats ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ),
@@ -5212,6 +5272,23 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value,
"PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
+//=== VK_EXT_device_address_binding_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+ "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+ "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" );
+
//=== VK_EXT_depth_clip_control ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ),
@@ -5553,6 +5630,83 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
"PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+//=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value,
+ "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value,
+ "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value,
+ "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value,
+ "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value,
+ "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value,
+ "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value,
+ "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value,
+ "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) ==
+ sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value,
+ "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
//=== VK_EXT_border_color_swizzle ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) ==
@@ -5693,6 +5847,36 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value,
"PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" );
+//=== VK_EXT_extended_dynamic_state3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value,
+ "ColorBlendEquationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value,
+ "ColorBlendAdvancedEXT is not nothrow_move_constructible!" );
+
//=== VK_EXT_subpass_merge_feedback ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) ==
@@ -5777,6 +5961,58 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value,
"PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" );
+//=== VK_NV_optical_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+ "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+ "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value,
+ "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+ "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::value,
+ "OpticalFlowSessionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value,
+ "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+ "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value,
+ "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" );
+
//=== VK_EXT_legacy_dithering ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ),
@@ -5786,6 +6022,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value,
"PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" );
+//=== VK_EXT_pipeline_protected_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+ "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
+
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),
diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp
index ade0079..eb83a4e 100644
--- a/include/vulkan/vulkan_structs.hpp
+++ b/include/vulkan/vulkan_structs.hpp
@@ -3412,6 +3412,307 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
};
+ struct MicromapUsageEXT
+ {
+ using NativeType = VkMicromapUsageEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+ : count( count_ )
+ , subdivisionLevel( subdivisionLevel_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+ {
+ count = count_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subdivisionLevel = subdivisionLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
+ {
+ format = format_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapUsageEXT *>( this );
+ }
+
+ operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapUsageEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( count, subdivisionLevel, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapUsageEXT const & ) const = default;
+#else
+ bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t count = {};
+ uint32_t subdivisionLevel = {};
+ uint32_t format = {};
+ };
+
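A minimal sketch of the fluent setter style this struct offers (available unless VULKAN_HPP_NO_STRUCT_SETTERS is defined); the values are placeholders:

  vk::MicromapUsageEXT usage = vk::MicromapUsageEXT()
                                 .setCount( 64 )
                                 .setSubdivisionLevel( 3 )
                                 .setFormat( 2 );  // format value is a placeholder (e.g. a 4-state opacity format)
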
+ struct AccelerationStructureTrianglesOpacityMicromapEXT
+ {
+ using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {},
+ uint32_t baseTriangle_ = {},
+ uint32_t usageCountsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , indexType( indexType_ )
+ , indexBuffer( indexBuffer_ )
+ , indexStride( indexStride_ )
+ , baseTriangle( baseTriangle_ )
+ , usageCountsCount( usageCountsCount_ )
+ , pUsageCounts( pUsageCounts_ )
+ , ppUsageCounts( ppUsageCounts_ )
+ , micromap( micromap_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT(
+ VULKAN_HPP_NAMESPACE::IndexType indexType_,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_,
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride_,
+ uint32_t baseTriangle_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+ void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , indexType( indexType_ )
+ , indexBuffer( indexBuffer_ )
+ , indexStride( indexStride_ )
+ , baseTriangle( baseTriangle_ )
+ , usageCountsCount( static_cast<uint32_t>( usageCounts_.size() ) )
+ , pUsageCounts( usageCounts_.data() )
+ , ppUsageCounts( pUsageCounts_.data() )
+ , micromap( micromap_ )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) );
+# else
+ if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexType = indexType_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexBuffer = indexBuffer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexStride = indexStride_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ baseTriangle = baseTriangle_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = usageCountsCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pUsageCounts = pUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT &
+ setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+ pUsageCounts = usageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ppUsageCounts = ppUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+ ppUsageCounts = pUsageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromap = micromap_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
+ }
+
+ operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::IndexType const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ uint32_t const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
+ uint32_t baseTriangle = {};
+ uint32_t usageCountsCount = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT>
+ {
+ using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
+ };
+
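A minimal sketch of attaching this structure to triangle geometry, assuming a pre-built vk::MicromapEXT named micromap and a vk::AccelerationStructureGeometryTrianglesDataKHR named triangles (both hypothetical), with placeholder usage values:

  vk::MicromapUsageEXT usage( /*count*/ 1, /*subdivisionLevel*/ 3, /*format*/ 2 );

  vk::AccelerationStructureTrianglesOpacityMicromapEXT micromapAttachment;
  micromapAttachment.indexType        = vk::IndexType::eNoneKHR;  // triangles map linearly onto the micromap
  micromapAttachment.usageCountsCount = 1;
  micromapAttachment.pUsageCounts     = &usage;
  micromapAttachment.micromap         = micromap;

  triangles.pNext = &micromapAttachment;  // chained into the geometry's pNext
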
struct AccelerationStructureVersionInfoKHR
{
using NativeType = VkAccelerationStructureVersionInfoKHR;
@@ -3481,7 +3782,7 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
#else
- bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
@@ -12560,6 +12861,254 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {};
};
+ struct ColorBlendAdvancedEXT
+ {
+ using NativeType = VkColorBlendAdvancedEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated,
+ VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT
+ : advancedBlendOp( advancedBlendOp_ )
+ , srcPremultiplied( srcPremultiplied_ )
+ , dstPremultiplied( dstPremultiplied_ )
+ , blendOverlap( blendOverlap_ )
+ , clampResults( clampResults_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ advancedBlendOp = advancedBlendOp_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcPremultiplied = srcPremultiplied_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstPremultiplied = dstPremultiplied_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blendOverlap = blendOverlap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT
+ {
+ clampResults = clampResults_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkColorBlendAdvancedEXT *>( this );
+ }
+
+ operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkColorBlendAdvancedEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::BlendOp const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
+#else
+ bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
+ ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults );
+# endif
+ }
+
+ bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+ VULKAN_HPP_NAMESPACE::Bool32 clampResults = {};
+ };
+
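ColorBlendAdvancedEXT mirrors VkColorBlendAdvancedEXT from VK_EXT_extended_dynamic_state3 and is consumed by vkCmdSetColorBlendAdvancedEXT (wrapped as vk::CommandBuffer::setColorBlendAdvancedEXT). A minimal usage sketch, assuming a recording command buffer, the extendedDynamicState3ColorBlendAdvanced feature enabled, and a dispatcher that has the extension entry point loaded (none of which is part of this diff):

    #include <vulkan/vulkan.hpp>

    // Sketch: dynamically select an advanced blend op for attachment 0.
    // Assumes `cmd` is recording, the bound pipeline enables
    // VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT, and the dispatcher has
    // vkCmdSetColorBlendAdvancedEXT loaded.
    void setAdvancedBlend( vk::CommandBuffer cmd )
    {
      vk::ColorBlendAdvancedEXT advanced = vk::ColorBlendAdvancedEXT()
                                             .setAdvancedBlendOp( vk::BlendOp::eMultiplyEXT )
                                             .setSrcPremultiplied( VK_TRUE )
                                             .setDstPremultiplied( VK_TRUE )
                                             .setBlendOverlap( vk::BlendOverlapEXT::eUncorrelated )
                                             .setClampResults( VK_FALSE );
      cmd.setColorBlendAdvancedEXT( 0, advanced );  // firstAttachment = 0, single element
    }
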
+ struct ColorBlendEquationEXT
+ {
+ using NativeType = VkColorBlendEquationEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT
+ : srcColorBlendFactor( srcColorBlendFactor_ )
+ , dstColorBlendFactor( dstColorBlendFactor_ )
+ , colorBlendOp( colorBlendOp_ )
+ , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+ , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+ , alphaBlendOp( alphaBlendOp_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcColorBlendFactor = srcColorBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstColorBlendFactor = dstColorBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorBlendOp = colorBlendOp_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAlphaBlendFactor = srcAlphaBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAlphaBlendFactor = dstAlphaBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ alphaBlendOp = alphaBlendOp_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkColorBlendEquationEXT *>( this );
+ }
+
+ operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkColorBlendEquationEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendOp const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendOp const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ColorBlendEquationEXT const & ) const = default;
+#else
+ bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
+ ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp );
+# endif
+ }
+
+ bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ };
+
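ColorBlendEquationEXT carries the same six fields as the per-attachment blend state in VkPipelineColorBlendAttachmentState, but as dynamic state for vkCmdSetColorBlendEquationEXT. A short sketch of premultiplied-alpha blending, under the same extension assumptions as above:

    #include <vulkan/vulkan.hpp>

    // Sketch: set standard alpha blending dynamically for one attachment.
    // Assumes `cmd` is recording with VK_EXT_extended_dynamic_state3 enabled.
    void setAlphaBlend( vk::CommandBuffer cmd, uint32_t attachment )
    {
      vk::ColorBlendEquationEXT equation( vk::BlendFactor::eOne,               // srcColorBlendFactor
                                          vk::BlendFactor::eOneMinusSrcAlpha,  // dstColorBlendFactor
                                          vk::BlendOp::eAdd,                   // colorBlendOp
                                          vk::BlendFactor::eOne,               // srcAlphaBlendFactor
                                          vk::BlendFactor::eOneMinusSrcAlpha,  // dstAlphaBlendFactor
                                          vk::BlendOp::eAdd );                 // alphaBlendOp
      cmd.setColorBlendEquationEXT( attachment, equation );
    }
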
struct CommandBufferAllocateInfo
{
using NativeType = VkCommandBufferAllocateInfo;
@@ -16098,6 +16647,326 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CopyMemoryToAccelerationStructureInfoKHR;
};
+ struct CopyMemoryToMicromapInfoEXT
+ {
+ using NativeType = VkCopyMemoryToMicromapInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( this );
+ }
+
+ operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
+ {
+ using Type = CopyMemoryToMicromapInfoEXT;
+ };
+
+ struct CopyMicromapInfoEXT
+ {
+ using NativeType = VkCopyMicromapInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
+ }
+
+ operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
+#else
+ bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
+# endif
+ }
+
+ bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
+ {
+ using Type = CopyMicromapInfoEXT;
+ };
+
+ struct CopyMicromapToMemoryInfoEXT
+ {
+ using NativeType = VkCopyMicromapToMemoryInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( this );
+ }
+
+ operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMicromapToMemoryInfoEXT>
+ {
+ using Type = CopyMicromapToMemoryInfoEXT;
+ };
+
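The three copy-info structs above follow the acceleration-structure copy pattern of VK_EXT_opacity_micromap: CopyMicromapInfoEXT for micromap-to-micromap copies, CopyMicromapToMemoryInfoEXT for serialization, and CopyMemoryToMicromapInfoEXT for deserialization. A sketch of compaction followed by serialization, assuming the micromap objects and the destination device address already exist and the dispatcher has the vkCmdCopyMicromapEXT / vkCmdCopyMicromapToMemoryEXT wrappers available:

    #include <vulkan/vulkan.hpp>

    // Sketch: compact one opacity micromap into another, then serialize the
    // result to a buffer's device address. `src`, `dst` and `serializedAddress`
    // are assumed to have been created and sized elsewhere.
    void copyAndSerialize( vk::CommandBuffer cmd,
                           vk::MicromapEXT   src,
                           vk::MicromapEXT   dst,
                           vk::DeviceAddress serializedAddress )
    {
      // On-device compaction between two micromap objects.
      vk::CopyMicromapInfoEXT copyInfo( src, dst, vk::CopyMicromapModeEXT::eCompact );
      cmd.copyMicromapEXT( copyInfo );

      // Serialize the compacted micromap into device memory.
      vk::CopyMicromapToMemoryInfoEXT serializeInfo( dst,
                                                     vk::DeviceOrHostAddressKHR( serializedAddress ),
                                                     vk::CopyMicromapModeEXT::eSerialize );
      cmd.copyMicromapToMemoryEXT( serializeInfo );
    }
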
struct CuFunctionCreateInfoNVX
{
using NativeType = VkCuFunctionCreateInfoNVX;
@@ -20994,6 +21863,138 @@ namespace VULKAN_HPP_NAMESPACE
};
using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+ struct DeviceAddressBindingCallbackDataEXT
+ {
+ using NativeType = VkDeviceAddressBindingCallbackDataEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT(
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , baseAddress( baseAddress_ )
+ , size( size_ )
+ , bindingType( bindingType_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ baseAddress = baseAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT &
+ setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindingType = bindingType_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT *>( this );
+ }
+
+ operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, baseAddress, size, bindingType );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default;
+#else
+ bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) &&
+ ( bindingType == rhs.bindingType );
+# endif
+ }
+
+ bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceAddressBindingCallbackDataEXT>
+ {
+ using Type = DeviceAddressBindingCallbackDataEXT;
+ };
+
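DeviceAddressBindingCallbackDataEXT is delivered through the pNext chain of VkDebugUtilsMessengerCallbackDataEXT when a messenger is created with the device-address-binding message type from VK_EXT_device_address_binding_report. A sketch of a callback that extracts it; the printf logging is illustrative only:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    // Sketch: pick VkDeviceAddressBindingCallbackDataEXT out of the callback
    // data's pNext chain. The messenger must have been created with
    // VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT.
    VKAPI_ATTR VkBool32 VKAPI_CALL addressBindingCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
                                                           VkDebugUtilsMessageTypeFlagsEXT,
                                                           const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                           void * )
    {
      for ( const VkBaseInStructure * p = reinterpret_cast<const VkBaseInStructure *>( pCallbackData->pNext ); p; p = p->pNext )
      {
        if ( p->sType == VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT )
        {
          auto const & binding = *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT *>( p );
          std::printf( "%s 0x%llx + %llu\n",
                       binding.bindingType == VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT ? "bind" : "unbind",
                       static_cast<unsigned long long>( binding.baseAddress ),
                       static_cast<unsigned long long>( binding.size ) );
        }
      }
      return VK_FALSE;
    }
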
struct DeviceBufferMemoryRequirements
{
using NativeType = VkDeviceBufferMemoryRequirements;
@@ -22537,6 +23538,607 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DeviceEventInfoEXT;
};
+ struct DeviceFaultAddressInfoEXT
+ {
+ using NativeType = VkDeviceFaultAddressInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone,
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT
+ : addressType( addressType_ )
+ , reportedAddress( reportedAddress_ )
+ , addressPrecision( addressPrecision_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressType = addressType_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reportedAddress = reportedAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressPrecision = addressPrecision_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( addressType, reportedAddress, addressPrecision );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision );
+# endif
+ }
+
+ bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {};
+ };
+
+ struct DeviceFaultCountsEXT
+ {
+ using NativeType = VkDeviceFaultCountsEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t addressInfoCount_ = {},
+ uint32_t vendorInfoCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , addressInfoCount( addressInfoCount_ )
+ , vendorInfoCount( vendorInfoCount_ )
+ , vendorBinarySize( vendorBinarySize_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressInfoCount = addressInfoCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorInfoCount = vendorInfoCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorBinarySize = vendorBinarySize_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultCountsEXT *>( this );
+ }
+
+ operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultCountsEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) &&
+ ( vendorBinarySize == rhs.vendorBinarySize );
+# endif
+ }
+
+ bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT;
+ void * pNext = {};
+ uint32_t addressInfoCount = {};
+ uint32_t vendorInfoCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceFaultCountsEXT>
+ {
+ using Type = DeviceFaultCountsEXT;
+ };
+
+ struct DeviceFaultVendorInfoEXT
+ {
+ using NativeType = VkDeviceFaultVendorInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ uint64_t vendorFaultCode_ = {},
+ uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : description( description_ )
+ , vendorFaultCode( vendorFaultCode_ )
+ , vendorFaultData( vendorFaultData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultVendorInfoEXT( *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array<char, VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+ {
+ description = description_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorFaultCode = vendorFaultCode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorFaultData = vendorFaultData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultVendorInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultVendorInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint64_t const &, uint64_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( description, vendorFaultCode, vendorFaultData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultVendorInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( description == rhs.description ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData );
+# endif
+ }
+
+ bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ uint64_t vendorFaultCode = {};
+ uint64_t vendorFaultData = {};
+ };
+
+ struct DeviceFaultInfoEXT
+ {
+ using NativeType = VkDeviceFaultInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {},
+ void * pVendorBinaryData_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , description( description_ )
+ , pAddressInfos( pAddressInfos_ )
+ , pVendorInfos( pVendorInfos_ )
+ , pVendorBinaryData( pVendorBinaryData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setDescription( std::array<char, VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+ {
+ description = description_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPAddressInfos( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pAddressInfos = pAddressInfos_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorInfos( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVendorInfos = pVendorInfos_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorBinaryData( void * pVendorBinaryData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVendorBinaryData = pVendorBinaryData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &,
+ void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( description == rhs.description ) && ( pAddressInfos == rhs.pAddressInfos ) &&
+ ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData );
+# endif
+ }
+
+ bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {};
+ void * pVendorBinaryData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceFaultInfoEXT>
+ {
+ using Type = DeviceFaultInfoEXT;
+ };
+
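DeviceFaultCountsEXT and DeviceFaultInfoEXT implement the usual two-call pattern for vkGetDeviceFaultInfoEXT after VK_ERROR_DEVICE_LOST: query the counts first, then allocate and fetch the report. A sketch using the C entry point fetched by name; the deviceFault feature must have been enabled, and a nonzero vendorBinarySize additionally requires deviceFaultVendorBinary:

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Sketch of the two-call VK_EXT_device_fault query after a device-lost error.
    void dumpDeviceFault( VkDevice device )
    {
      auto pfnGetFaultInfo = reinterpret_cast<PFN_vkGetDeviceFaultInfoEXT>( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );

      VkDeviceFaultCountsEXT counts{ VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
      pfnGetFaultInfo( device, &counts, nullptr );  // first call: fill the counts only

      std::vector<VkDeviceFaultAddressInfoEXT> addressInfos( counts.addressInfoCount );
      std::vector<VkDeviceFaultVendorInfoEXT>  vendorInfos( counts.vendorInfoCount );
      std::vector<uint8_t>                     vendorBinary( static_cast<size_t>( counts.vendorBinarySize ) );

      VkDeviceFaultInfoEXT info{ VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
      info.pAddressInfos     = addressInfos.data();
      info.pVendorInfos      = vendorInfos.data();
      info.pVendorBinaryData = vendorBinary.data();
      pfnGetFaultInfo( device, &counts, &info );  // second call: fill the arrays
      // info.description now holds a human-readable summary; each reported
      // address is only accurate to within addressInfos[i].addressPrecision bytes.
    }
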
+ struct DeviceFaultVendorBinaryHeaderVersionOneEXT
+ {
+ using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT(
+ uint32_t headerSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne,
+ uint32_t vendorID_ = {},
+ uint32_t deviceID_ = {},
+ uint32_t driverVersion_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
+ uint32_t applicationNameOffset_ = {},
+ uint32_t applicationVersion_ = {},
+ uint32_t engineNameOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : headerSize( headerSize_ )
+ , headerVersion( headerVersion_ )
+ , vendorID( vendorID_ )
+ , deviceID( deviceID_ )
+ , driverVersion( driverVersion_ )
+ , pipelineCacheUUID( pipelineCacheUUID_ )
+ , applicationNameOffset( applicationNameOffset_ )
+ , applicationVersion( applicationVersion_ )
+ , engineNameOffset( engineNameOffset_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ headerSize = headerSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
+ setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ headerVersion = headerVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorID = vendorID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceID = deviceID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ driverVersion = driverVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
+ setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineCacheUUID = pipelineCacheUUID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ applicationNameOffset = applicationNameOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ applicationVersion = applicationVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ engineNameOffset = engineNameOffset_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+ }
+
+ operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ headerSize, headerVersion, vendorID, deviceID, driverVersion, pipelineCacheUUID, applicationNameOffset, applicationVersion, engineNameOffset );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+ ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+ ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) &&
+ ( engineNameOffset == rhs.engineNameOffset );
+# endif
+ }
+
+ bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t headerSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
+ uint32_t vendorID = {};
+ uint32_t deviceID = {};
+ uint32_t driverVersion = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+ uint32_t applicationNameOffset = {};
+ uint32_t applicationVersion = {};
+ uint32_t engineNameOffset = {};
+ };
+
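DeviceFaultVendorBinaryHeaderVersionOneEXT describes the fixed header at the start of the pVendorBinaryData blob returned above; headerSize gives the offset at which the vendor-specific payload begins. A sketch that copies the header out (memcpy, to avoid alignment assumptions on the blob) and checks the version before touching the payload:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>
    #include <cstring>

    // Sketch: interpret the version-one header of a vendor binary fault blob.
    void inspectVendorBinary( const void * blob, size_t blobSize )
    {
      VkDeviceFaultVendorBinaryHeaderVersionOneEXT header{};
      if ( blobSize < sizeof( header ) )
        return;
      std::memcpy( &header, blob, sizeof( header ) );
      if ( header.headerVersion != VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT )
        return;  // unknown layout; leave the payload alone
      std::printf( "vendorID 0x%x deviceID 0x%x driverVersion 0x%x, payload starts at byte %u\n",
                   static_cast<unsigned>( header.vendorID ),
                   static_cast<unsigned>( header.deviceID ),
                   static_cast<unsigned>( header.driverVersion ),
                   static_cast<unsigned>( header.headerSize ) );
    }
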
struct DeviceGroupBindSparseInfo
{
using NativeType = VkDeviceGroupBindSparseInfo;
@@ -43475,6 +45077,706 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ struct MicromapBuildInfoEXT
+ {
+ using NativeType = VkMicromapBuildInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild,
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {},
+ uint32_t usageCountsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
+ , flags( flags_ )
+ , mode( mode_ )
+ , dstMicromap( dstMicromap_ )
+ , usageCountsCount( usageCountsCount_ )
+ , pUsageCounts( pUsageCounts_ )
+ , ppUsageCounts( ppUsageCounts_ )
+ , data( data_ )
+ , scratchData( scratchData_ )
+ , triangleArray( triangleArray_ )
+ , triangleArrayStride( triangleArrayStride_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapBuildInfoEXT( *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_,
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , type( type_ )
+ , flags( flags_ )
+ , mode( mode_ )
+ , dstMicromap( dstMicromap_ )
+ , usageCountsCount( static_cast<uint32_t>( usageCounts_.size() ) )
+ , pUsageCounts( usageCounts_.data() )
+ , ppUsageCounts( pUsageCounts_.data() )
+ , data( data_ )
+ , scratchData( scratchData_ )
+ , triangleArray( triangleArray_ )
+ , triangleArrayStride( triangleArrayStride_ )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) );
+# else
+ if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+ {
+ type = type_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstMicromap = dstMicromap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = usageCountsCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pUsageCounts = pUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT &
+ setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+ pUsageCounts = usageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ppUsageCounts = ppUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT & setPUsageCounts(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+ ppUsageCounts = pUsageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+ {
+ data = data_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scratchData = scratchData_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT &
+ setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT
+ {
+ triangleArray = triangleArray_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ triangleArrayStride = triangleArrayStride_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapBuildInfoEXT *>( this );
+ }
+
+ operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapBuildInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild;
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {};
+ uint32_t usageCountsCount = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapBuildInfoEXT>
+ {
+ using Type = MicromapBuildInfoEXT;
+ };
+
+ struct MicromapBuildSizesInfoEXT
+ {
+ using NativeType = VkMicromapBuildSizesInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , micromapSize( micromapSize_ )
+ , buildScratchSize( buildScratchSize_ )
+ , discardable( discardable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapBuildSizesInfoEXT( *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapSize = micromapSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buildScratchSize = buildScratchSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ discardable = discardable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapBuildSizesInfoEXT *>( this );
+ }
+
+ operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) &&
+ ( discardable == rhs.discardable );
+# endif
+ }
+
+ bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 discardable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapBuildSizesInfoEXT>
+ {
+ using Type = MicromapBuildSizesInfoEXT;
+ };
+
+ struct MicromapCreateInfoEXT
+ {
+ using NativeType = VkMicromapCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap,
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , createFlags( createFlags_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ , size( size_ )
+ , type( type_ )
+ , deviceAddress( deviceAddress_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapCreateInfoEXT( *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ createFlags = createFlags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+ {
+ type = type_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceAddress = deviceAddress_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapCreateInfoEXT *>( this );
+ }
+
+ operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::Buffer const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
+ ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
+# endif
+ }
+
+ bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapCreateInfoEXT>
+ {
+ using Type = MicromapCreateInfoEXT;
+ };
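// A minimal usage sketch, not part of the generated header: filling the MicromapCreateInfoEXT
// wrapper above from previously queried build sizes. The buffer and the MicromapBuildSizesInfoEXT
// are assumed to have been obtained beforehand (e.g. the sizes via vkGetMicromapBuildSizesEXT);
// all names below are hypothetical.
#include <vulkan/vulkan.hpp>

vk::MicromapCreateInfoEXT makeOpacityMicromapCreateInfo( vk::Buffer                            micromapStorageBuffer,
                                                         vk::MicromapBuildSizesInfoEXT const & sizes )
{
  // The generated setters return *this, so they can be chained; the result is also implicitly
  // convertible to VkMicromapCreateInfoEXT wherever the C API is needed.
  return vk::MicromapCreateInfoEXT{}
    .setBuffer( micromapStorageBuffer )
    .setOffset( 0 )
    .setSize( sizes.micromapSize )
    .setType( vk::MicromapTypeEXT::eOpacityMicromap );
}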
+
+ struct MicromapTriangleEXT
+ {
+ using NativeType = VkMicromapTriangleEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dataOffset( dataOffset_ )
+ , subdivisionLevel( subdivisionLevel_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast<MicromapTriangleEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataOffset = dataOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subdivisionLevel = subdivisionLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT
+ {
+ format = format_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapTriangleEXT *>( this );
+ }
+
+ operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapTriangleEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint16_t const &, uint16_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( dataOffset, subdivisionLevel, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapTriangleEXT const & ) const = default;
+#else
+ bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t dataOffset = {};
+ uint16_t subdivisionLevel = {};
+ uint16_t format = {};
+ };
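// A sketch, not part of the generated header: building per-triangle entries for an opacity
// micromap. The dataOffset/subdivisionLevel values are hypothetical, and the cast of
// vk::OpacityMicromapFormatEXT to the uint16_t format field is an assumption drawn from the
// VK_EXT_opacity_micromap usage pattern.
#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<vk::MicromapTriangleEXT> makeUniformTriangleEntries( uint32_t triangleCount, uint32_t bytesPerTriangle )
{
  std::vector<vk::MicromapTriangleEXT> triangles( triangleCount );
  for ( uint32_t i = 0; i < triangleCount; ++i )
  {
    triangles[i]
      .setDataOffset( i * bytesPerTriangle )  // byte offset of this triangle's data in the micromap input buffer
      .setSubdivisionLevel( 3 )               // hypothetical uniform subdivision level
      .setFormat( static_cast<uint16_t>( vk::OpacityMicromapFormatEXT::e4State ) );
  }
  return triangles;
}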
+
+ struct MicromapVersionInfoEXT
+ {
+ using NativeType = VkMicromapVersionInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pVersionData( pVersionData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVersionData = pVersionData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
+ }
+
+ operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pVersionData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+# endif
+ }
+
+ bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapVersionInfoEXT;
+ const void * pNext = {};
+ const uint8_t * pVersionData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
+ {
+ using Type = MicromapVersionInfoEXT;
+ };
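// A sketch, not part of the generated header: using MicromapVersionInfoEXT to ask whether
// serialized micromap data is compatible with a device. It assumes pVersionData points at the
// 2 * VK_UUID_SIZE version header of the serialized blob and that the enhanced-mode binding
// Device::getMicromapCompatibilityEXT (wrapping vkGetDeviceMicromapCompatibilityEXT) is
// available; treat both as assumptions from the VK_EXT_opacity_micromap extension.
#include <vulkan/vulkan.hpp>

bool isSerializedMicromapCompatible( vk::Device device, const uint8_t * serializedBlob )
{
  vk::MicromapVersionInfoEXT versionInfo{};
  versionInfo.setPVersionData( serializedBlob );  // first 2 * VK_UUID_SIZE bytes of the blob

  return device.getMicromapCompatibilityEXT( versionInfo ) == vk::AccelerationStructureCompatibilityKHR::eCompatible;
}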
+
struct MultiDrawIndexedInfoEXT
{
using NativeType = VkMultiDrawIndexedInfoEXT;
@@ -44196,6 +46498,624 @@ namespace VULKAN_HPP_NAMESPACE
};
using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+ struct OpticalFlowExecuteInfoNV
+ {
+ using NativeType = VkOpticalFlowExecuteInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {},
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , regionCount( regionCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowExecuteInfoNV( *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_,
+ void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ OpticalFlowExecuteInfoNV &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowExecuteInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, regionCount, pRegions );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+ }
+
+ bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {};
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowExecuteInfoNV>
+ {
+ using Type = OpticalFlowExecuteInfoNV;
+ };
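// A sketch, not part of the generated header: the enhanced-mode constructor shown above lets a
// std::vector drive regionCount/pRegions directly. The vector must stay alive for as long as the
// execute info is used; the vkCmdOpticalFlowExecuteNV call that consumes it is not shown.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::OpticalFlowExecuteInfoNV makeExecuteInfo( std::vector<vk::Rect2D> const & regions )
{
  // flags first, then the ArrayProxy of regions (count and pointer are taken from the vector)
  return vk::OpticalFlowExecuteInfoNV( vk::OpticalFlowExecuteFlagsNV{}, regions );
}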
+
+ struct OpticalFlowImageFormatInfoNV
+ {
+ using NativeType = VkOpticalFlowImageFormatInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , usage( usage_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowImageFormatInfoNV( *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usage = usage_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowImageFormatInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, usage );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
+# endif
+ }
+
+ bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowImageFormatInfoNV>
+ {
+ using Type = OpticalFlowImageFormatInfoNV;
+ };
+
+ struct OpticalFlowImageFormatPropertiesNV
+ {
+ using NativeType = VkOpticalFlowImageFormatPropertiesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV *>( this );
+ }
+
+ operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowImageFormatPropertiesNV>
+ {
+ using Type = OpticalFlowImageFormatPropertiesNV;
+ };
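// A sketch, not part of the generated header: enumerating which image formats a physical device
// accepts for optical-flow input. The enhanced-mode binding
// PhysicalDevice::getOpticalFlowImageFormatsNV (wrapping vkGetPhysicalDeviceOpticalFlowImageFormatsNV)
// returning a vector of OpticalFlowImageFormatPropertiesNV is assumed here.
#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<vk::Format> queryOpticalFlowInputFormats( vk::PhysicalDevice physicalDevice )
{
  vk::OpticalFlowImageFormatInfoNV formatInfo{};
  formatInfo.setUsage( vk::OpticalFlowUsageFlagBitsNV::eInput );

  std::vector<vk::Format> formats;
  for ( vk::OpticalFlowImageFormatPropertiesNV const & properties : physicalDevice.getOpticalFlowImageFormatsNV( formatInfo ) )
  {
    formats.push_back( properties.format );
  }
  return formats;
}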
+
+ struct OpticalFlowSessionCreateInfoNV
+ {
+ using NativeType = VkOpticalFlowSessionCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV(
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , width( width_ )
+ , height( height_ )
+ , imageFormat( imageFormat_ )
+ , flowVectorFormat( flowVectorFormat_ )
+ , costFormat( costFormat_ )
+ , outputGridSize( outputGridSize_ )
+ , hintGridSize( hintGridSize_ )
+ , performanceLevel( performanceLevel_ )
+ , flags( flags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ {
+ width = width_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageFormat = imageFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flowVectorFormat = flowVectorFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ costFormat = costFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ outputGridSize = outputGridSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hintGridSize = hintGridSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ performanceLevel = performanceLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) &&
+ ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) &&
+ ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags );
+# endif
+ }
+
+ bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV;
+ void * pNext = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown;
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
+ {
+ using Type = OpticalFlowSessionCreateInfoNV;
+ };
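// A sketch, not part of the generated header: a minimal session description. The width/height,
// grid size and performance level are hypothetical, and the two formats should come from an
// optical-flow image-format query rather than being hard-coded.
#include <vulkan/vulkan.hpp>

vk::OpticalFlowSessionCreateInfoNV makeSessionCreateInfo( vk::Format imageFormat, vk::Format flowVectorFormat )
{
  return vk::OpticalFlowSessionCreateInfoNV{}
    .setWidth( 1920 )
    .setHeight( 1080 )
    .setImageFormat( imageFormat )
    .setFlowVectorFormat( flowVectorFormat )
    .setOutputGridSize( vk::OpticalFlowGridSizeFlagBitsNV::e4X4 )
    .setPerformanceLevel( vk::OpticalFlowPerformanceLevelNV::eFast );
}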
+
+ struct OpticalFlowSessionCreatePrivateDataInfoNV
+ {
+ using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {},
+ uint32_t size_ = {},
+ const void * pPrivateData_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , id( id_ )
+ , size( size_ )
+ , pPrivateData( pPrivateData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
+ {
+ id = id_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPrivateData = pPrivateData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, id, size, pPrivateData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData );
+# endif
+ }
+
+ bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+ void * pNext = {};
+ uint32_t id = {};
+ uint32_t size = {};
+ const void * pPrivateData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
+ {
+ using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
+ };
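// A sketch, not part of the generated header: linking vendor private data into the pNext chain of
// an OpticalFlowSessionCreateInfoNV. The id/size/payload are vendor-defined, so the parameters
// here are purely hypothetical; both structs must outlive the session-creation call.
#include <vulkan/vulkan.hpp>

void attachSessionPrivateData( vk::OpticalFlowSessionCreateInfoNV &            sessionCreateInfo,
                               vk::OpticalFlowSessionCreatePrivateDataInfoNV & privateDataInfo,
                               uint32_t id, uint32_t size, const void * payload )
{
  privateDataInfo.setId( id ).setSize( size ).setPPrivateData( payload );
  privateDataInfo.pNext   = sessionCreateInfo.pNext;  // preserve anything already chained
  sessionCreateInfo.pNext = &privateDataInfo;         // put the private data at the head of the chain
}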
+
struct PastPresentationTimingGOOGLE
{
using NativeType = VkPastPresentationTimingGOOGLE;
@@ -45949,6 +48869,105 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
};
+ struct PhysicalDeviceAddressBindingReportFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , reportAddressBinding( reportAddressBinding_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT &
+ setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reportAddressBinding = reportAddressBinding_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, reportAddressBinding );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
+ {
+ using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
+ };
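// A sketch, not part of the generated header: checking the feature bit above through the pNext
// chain of vkGetPhysicalDeviceFeatures2. The StructureChain-returning overload of
// PhysicalDevice::getFeatures2 is assumed to be available in enhanced mode.
#include <vulkan/vulkan.hpp>

bool supportsAddressBindingReport( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceAddressBindingReportFeaturesEXT>();
  return chain.get<vk::PhysicalDeviceAddressBindingReportFeaturesEXT>().reportAddressBinding == VK_TRUE;
}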
+
struct PhysicalDeviceAmigoProfilingFeaturesSEC
{
using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;
@@ -50058,6 +53077,600 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
};
+ struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , extendedDynamicState3TessellationDomainOrigin( extendedDynamicState3TessellationDomainOrigin_ )
+ , extendedDynamicState3DepthClampEnable( extendedDynamicState3DepthClampEnable_ )
+ , extendedDynamicState3PolygonMode( extendedDynamicState3PolygonMode_ )
+ , extendedDynamicState3RasterizationSamples( extendedDynamicState3RasterizationSamples_ )
+ , extendedDynamicState3SampleMask( extendedDynamicState3SampleMask_ )
+ , extendedDynamicState3AlphaToCoverageEnable( extendedDynamicState3AlphaToCoverageEnable_ )
+ , extendedDynamicState3AlphaToOneEnable( extendedDynamicState3AlphaToOneEnable_ )
+ , extendedDynamicState3LogicOpEnable( extendedDynamicState3LogicOpEnable_ )
+ , extendedDynamicState3ColorBlendEnable( extendedDynamicState3ColorBlendEnable_ )
+ , extendedDynamicState3ColorBlendEquation( extendedDynamicState3ColorBlendEquation_ )
+ , extendedDynamicState3ColorWriteMask( extendedDynamicState3ColorWriteMask_ )
+ , extendedDynamicState3RasterizationStream( extendedDynamicState3RasterizationStream_ )
+ , extendedDynamicState3ConservativeRasterizationMode( extendedDynamicState3ConservativeRasterizationMode_ )
+ , extendedDynamicState3ExtraPrimitiveOverestimationSize( extendedDynamicState3ExtraPrimitiveOverestimationSize_ )
+ , extendedDynamicState3DepthClipEnable( extendedDynamicState3DepthClipEnable_ )
+ , extendedDynamicState3SampleLocationsEnable( extendedDynamicState3SampleLocationsEnable_ )
+ , extendedDynamicState3ColorBlendAdvanced( extendedDynamicState3ColorBlendAdvanced_ )
+ , extendedDynamicState3ProvokingVertexMode( extendedDynamicState3ProvokingVertexMode_ )
+ , extendedDynamicState3LineRasterizationMode( extendedDynamicState3LineRasterizationMode_ )
+ , extendedDynamicState3LineStippleEnable( extendedDynamicState3LineStippleEnable_ )
+ , extendedDynamicState3DepthClipNegativeOneToOne( extendedDynamicState3DepthClipNegativeOneToOne_ )
+ , extendedDynamicState3ViewportWScalingEnable( extendedDynamicState3ViewportWScalingEnable_ )
+ , extendedDynamicState3ViewportSwizzle( extendedDynamicState3ViewportSwizzle_ )
+ , extendedDynamicState3CoverageToColorEnable( extendedDynamicState3CoverageToColorEnable_ )
+ , extendedDynamicState3CoverageToColorLocation( extendedDynamicState3CoverageToColorLocation_ )
+ , extendedDynamicState3CoverageModulationMode( extendedDynamicState3CoverageModulationMode_ )
+ , extendedDynamicState3CoverageModulationTableEnable( extendedDynamicState3CoverageModulationTableEnable_ )
+ , extendedDynamicState3CoverageModulationTable( extendedDynamicState3CoverageModulationTable_ )
+ , extendedDynamicState3CoverageReductionMode( extendedDynamicState3CoverageReductionMode_ )
+ , extendedDynamicState3RepresentativeFragmentTestEnable( extendedDynamicState3RepresentativeFragmentTestEnable_ )
+ , extendedDynamicState3ShadingRateImageEnable( extendedDynamicState3ShadingRateImageEnable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ extendedDynamicState3TessellationDomainOrigin,
+ extendedDynamicState3DepthClampEnable,
+ extendedDynamicState3PolygonMode,
+ extendedDynamicState3RasterizationSamples,
+ extendedDynamicState3SampleMask,
+ extendedDynamicState3AlphaToCoverageEnable,
+ extendedDynamicState3AlphaToOneEnable,
+ extendedDynamicState3LogicOpEnable,
+ extendedDynamicState3ColorBlendEnable,
+ extendedDynamicState3ColorBlendEquation,
+ extendedDynamicState3ColorWriteMask,
+ extendedDynamicState3RasterizationStream,
+ extendedDynamicState3ConservativeRasterizationMode,
+ extendedDynamicState3ExtraPrimitiveOverestimationSize,
+ extendedDynamicState3DepthClipEnable,
+ extendedDynamicState3SampleLocationsEnable,
+ extendedDynamicState3ColorBlendAdvanced,
+ extendedDynamicState3ProvokingVertexMode,
+ extendedDynamicState3LineRasterizationMode,
+ extendedDynamicState3LineStippleEnable,
+ extendedDynamicState3DepthClipNegativeOneToOne,
+ extendedDynamicState3ViewportWScalingEnable,
+ extendedDynamicState3ViewportSwizzle,
+ extendedDynamicState3CoverageToColorEnable,
+ extendedDynamicState3CoverageToColorLocation,
+ extendedDynamicState3CoverageModulationMode,
+ extendedDynamicState3CoverageModulationTableEnable,
+ extendedDynamicState3CoverageModulationTable,
+ extendedDynamicState3CoverageReductionMode,
+ extendedDynamicState3RepresentativeFragmentTestEnable,
+ extendedDynamicState3ShadingRateImageEnable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+ ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) &&
+ ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) &&
+ ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) &&
+ ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) &&
+ ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) &&
+ ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) &&
+ ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) &&
+ ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) &&
+ ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) &&
+ ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) &&
+ ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) &&
+ ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) &&
+ ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) &&
+ ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) &&
+ ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) &&
+ ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) &&
+ ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) &&
+ ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) &&
+ ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) &&
+ ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) &&
+ ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) &&
+ ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) &&
+ ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) &&
+ ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) &&
+ ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) &&
+ ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) &&
+ ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) &&
+ ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) &&
+ ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) &&
+ ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) &&
+ ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
+ {
+ using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ };
+
+ struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dynamicPrimitiveTopologyUnrestricted( dynamicPrimitiveTopologyUnrestricted_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT &
+ setDynamicPrimitiveTopologyUnrestricted( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dynamicPrimitiveTopologyUnrestricted = dynamicPrimitiveTopologyUnrestricted_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
+ {
+ using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ };
+
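A minimal sketch of how the two new extended-dynamic-state-3 structs might be consumed (not part of this diff): it assumes a valid vk::PhysicalDevice named physicalDevice, Vulkan 1.1 entry points, and the default vulkan.hpp dispatcher; the function name is illustrative only. StructureChain fills in sType and links pNext automatically.

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    void queryExtendedDynamicState3( vk::PhysicalDevice physicalDevice )
    {
      // Chain the new structs behind the core *2 query structs.
      auto features   = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                    vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>();
      auto properties = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                      vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>();

      auto const & eds3 = features.get<vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>();
      auto const & prop = properties.get<vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>();

      // In a real application, first check that VK_EXT_extended_dynamic_state3 is advertised.
      std::cout << "dynamic polygon mode:          " << eds3.extendedDynamicState3PolygonMode << '\n'
                << "unrestricted dynamic topology: " << prop.dynamicPrimitiveTopologyUnrestricted << '\n';
    }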
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
@@ -50761,6 +54374,114 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+ struct PhysicalDeviceFaultFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceFaultFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceFault( deviceFault_ )
+ , deviceFaultVendorBinary( deviceFaultVendorBinary_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceFault = deviceFault_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT &
+ setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceFaultVendorBinary = deviceFaultVendorBinary_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) &&
+ ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT>
+ {
+ using Type = PhysicalDeviceFaultFeaturesEXT;
+ };
+
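One way the new fault-feature struct might be used is to opt in at device creation. The sketch below is not part of this diff: queue setup, extension-support checks, and error handling are assumed to happen elsewhere, and the exception-throwing default configuration of vulkan.hpp is assumed.

    #include <vulkan/vulkan.hpp>

    vk::Device createDeviceWithFaultReporting( vk::PhysicalDevice                physicalDevice,
                                               vk::DeviceQueueCreateInfo const & queueCreateInfo )
    {
      // Request only the core deviceFault feature; deviceFaultVendorBinary is left off.
      vk::PhysicalDeviceFaultFeaturesEXT faultFeatures;
      faultFeatures.setDeviceFault( VK_TRUE );

      const char * extensions[] = { "VK_EXT_device_fault" };  // VK_EXT_DEVICE_FAULT_EXTENSION_NAME

      vk::DeviceCreateInfo createInfo;
      createInfo.queueCreateInfoCount    = 1;
      createInfo.pQueueCreateInfos       = &queueCreateInfo;
      createInfo.enabledExtensionCount   = 1;
      createInfo.ppEnabledExtensionNames = extensions;
      createInfo.pNext                   = &faultFeatures;    // chain the feature struct into device creation

      return physicalDevice.createDevice( createInfo );
    }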
struct PhysicalDeviceFeatures2
{
using NativeType = VkPhysicalDeviceFeatures2;
@@ -58040,6 +61761,452 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
};
+ struct PhysicalDeviceOpacityMicromapFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , micromap( micromap_ )
+ , micromapCaptureReplay( micromapCaptureReplay_ )
+ , micromapHostCommands( micromapHostCommands_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromap = micromap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT &
+ setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapCaptureReplay = micromapCaptureReplay_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT &
+ setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapHostCommands = micromapHostCommands_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) &&
+ ( micromapHostCommands == rhs.micromapHostCommands );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromap = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT>
+ {
+ using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
+ };
+
+ struct PhysicalDeviceOpacityMicromapPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {},
+ uint32_t maxOpacity4StateSubdivisionLevel_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxOpacity2StateSubdivisionLevel( maxOpacity2StateSubdivisionLevel_ )
+ , maxOpacity4StateSubdivisionLevel( maxOpacity4StateSubdivisionLevel_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) &&
+ ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+ void * pNext = {};
+ uint32_t maxOpacity2StateSubdivisionLevel = {};
+ uint32_t maxOpacity4StateSubdivisionLevel = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT>
+ {
+ using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
+ };
+
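A small sketch (not part of this diff) showing how the opacity-micromap feature and property structs might be queried together; physicalDevice and the function name are placeholders, and Vulkan 1.1 entry points are assumed.

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    void queryOpacityMicromapSupport( vk::PhysicalDevice physicalDevice )
    {
      auto features   = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                    vk::PhysicalDeviceOpacityMicromapFeaturesEXT>();
      auto properties = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                      vk::PhysicalDeviceOpacityMicromapPropertiesEXT>();

      auto const & mm     = features.get<vk::PhysicalDeviceOpacityMicromapFeaturesEXT>();
      auto const & limits = properties.get<vk::PhysicalDeviceOpacityMicromapPropertiesEXT>();

      std::cout << "micromap feature:              " << mm.micromap << '\n'
                << "max 2-state subdivision level: " << limits.maxOpacity2StateSubdivisionLevel << '\n'
                << "max 4-state subdivision level: " << limits.maxOpacity4StateSubdivisionLevel << '\n';
    }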
+ struct PhysicalDeviceOpticalFlowFeaturesNV
+ {
+ using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , opticalFlow( opticalFlow_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
+ {
+ opticalFlow = opticalFlow_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, opticalFlow );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
+ {
+ using Type = PhysicalDeviceOpticalFlowFeaturesNV;
+ };
+
+ struct PhysicalDeviceOpticalFlowPropertiesNV
+ {
+ using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {},
+ uint32_t minWidth_ = {},
+ uint32_t minHeight_ = {},
+ uint32_t maxWidth_ = {},
+ uint32_t maxHeight_ = {},
+ uint32_t maxNumRegionsOfInterest_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supportedOutputGridSizes( supportedOutputGridSizes_ )
+ , supportedHintGridSizes( supportedHintGridSizes_ )
+ , hintSupported( hintSupported_ )
+ , costSupported( costSupported_ )
+ , bidirectionalFlowSupported( bidirectionalFlowSupported_ )
+ , globalFlowSupported( globalFlowSupported_ )
+ , minWidth( minWidth_ )
+ , minHeight( minHeight_ )
+ , maxWidth( maxWidth_ )
+ , maxHeight( maxHeight_ )
+ , maxNumRegionsOfInterest( maxNumRegionsOfInterest_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ supportedOutputGridSizes,
+ supportedHintGridSizes,
+ hintSupported,
+ costSupported,
+ bidirectionalFlowSupported,
+ globalFlowSupported,
+ minWidth,
+ minHeight,
+ maxWidth,
+ maxHeight,
+ maxNumRegionsOfInterest );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) &&
+ ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) &&
+ ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) &&
+ ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) &&
+ ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 costSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {};
+ uint32_t minWidth = {};
+ uint32_t minHeight = {};
+ uint32_t maxWidth = {};
+ uint32_t maxHeight = {};
+ uint32_t maxNumRegionsOfInterest = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
+ {
+ using Type = PhysicalDeviceOpticalFlowPropertiesNV;
+ };
+
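A hedged sketch (not part of this diff) of querying the NV optical-flow feature and, when present, its limits; it leans on the to_string overload for OpticalFlowGridSizeFlagsNV added later in this commit. physicalDevice is assumed to exist and the function name is illustrative.

    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_to_string.hpp>
    #include <iostream>

    void queryOpticalFlowSupport( vk::PhysicalDevice physicalDevice )
    {
      auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                                  vk::PhysicalDeviceOpticalFlowFeaturesNV>();
      if ( !features.get<vk::PhysicalDeviceOpticalFlowFeaturesNV>().opticalFlow )
        return;  // VK_NV_optical_flow not usable on this device

      auto properties = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                      vk::PhysicalDeviceOpticalFlowPropertiesNV>();
      auto const & flow = properties.get<vk::PhysicalDeviceOpticalFlowPropertiesNV>();

      std::cout << "output grid sizes: " << vk::to_string( flow.supportedOutputGridSizes ) << '\n'
                << "max image size:    " << flow.maxWidth << " x " << flow.maxHeight << '\n';
    }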
struct PhysicalDevicePCIBusInfoPropertiesEXT
{
using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
@@ -58721,6 +62888,105 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
};
+ struct PhysicalDevicePipelineProtectedAccessFeaturesEXT
+ {
+ using NativeType = VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineProtectedAccess( pipelineProtectedAccess_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineProtectedAccessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT &
+ setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineProtectedAccess = pipelineProtectedAccess_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelineProtectedAccess );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT>
+ {
+ using Type = PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ };
+
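A minimal sketch (not part of this diff) of checking the pipelineProtectedAccess feature; the helper name is hypothetical. When the feature is supported and enabled, pipelines may carry the new PipelineCreateFlagBits::eNoProtectedAccessEXT / eProtectedAccessOnlyEXT flags whose to_string support appears further down in this commit.

    #include <vulkan/vulkan.hpp>

    bool supportsPipelineProtectedAccess( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                               vk::PhysicalDevicePipelineProtectedAccessFeaturesEXT>();
      return chain.get<vk::PhysicalDevicePipelineProtectedAccessFeaturesEXT>().pipelineProtectedAccess == VK_TRUE;
    }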
struct PhysicalDevicePipelineRobustnessFeaturesEXT
{
using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT;
@@ -59389,6 +63655,103 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ struct PhysicalDevicePresentBarrierFeaturesNV
+ {
+ using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrier( presentBarrier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrier = presentBarrier_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
+ {
+ using Type = PhysicalDevicePresentBarrierFeaturesNV;
+ };
+
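A short, hedged sketch (not part of this diff) of the corresponding feature query; the surface- and swapchain-side structs for VK_NV_present_barrier appear later in this commit, and a usage sketch for them follows SwapchainPresentBarrierCreateInfoNV below.

    #include <vulkan/vulkan.hpp>

    bool supportsPresentBarrier( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                               vk::PhysicalDevicePresentBarrierFeaturesNV>();
      return chain.get<vk::PhysicalDevicePresentBarrierFeaturesNV>().presentBarrier == VK_TRUE;
    }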
struct PhysicalDevicePresentIdFeaturesKHR
{
using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
@@ -87442,6 +91805,104 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ struct SurfaceCapabilitiesPresentBarrierNV
+ {
+ using NativeType = VkSurfaceCapabilitiesPresentBarrierNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrierSupported( presentBarrierSupported_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV &
+ setPresentBarrierSupported( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrierSupported = presentBarrierSupported_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV *>( this );
+ }
+
+ operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrierSupported );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
+#else
+ bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported );
+# endif
+ }
+
+ bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
+ {
+ using Type = SurfaceCapabilitiesPresentBarrierNV;
+ };
+
struct SurfaceFormatKHR
{
using NativeType = VkSurfaceFormatKHR;
@@ -88404,6 +92865,104 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
};
+ struct SwapchainPresentBarrierCreateInfoNV
+ {
+ using NativeType = VkSwapchainPresentBarrierCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrierEnable( presentBarrierEnable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV &
+ setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrierEnable = presentBarrierEnable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV *>( this );
+ }
+
+ operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrierEnable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default;
+#else
+ bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable );
+# endif
+ }
+
+ bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSwapchainPresentBarrierCreateInfoNV>
+ {
+ using Type = SwapchainPresentBarrierCreateInfoNV;
+ };
+
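Tying the present-barrier pieces together, a hypothetical helper (not part of this diff) might check SurfaceCapabilitiesPresentBarrierNV and, if supported, chain SwapchainPresentBarrierCreateInfoNV into the swapchain create info. It assumes VK_KHR_get_surface_capabilities2 is enabled on the instance, that all other swapchain fields are filled elsewhere, and that nothing else is already chained on the create info.

    #include <vulkan/vulkan.hpp>

    void enablePresentBarrierIfSupported( vk::PhysicalDevice                        physicalDevice,
                                          vk::SurfaceKHR                            surface,
                                          vk::SwapchainCreateInfoKHR &              swapchainCreateInfo,
                                          vk::SwapchainPresentBarrierCreateInfoNV & barrierInfo )
    {
      auto caps = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
                                                            vk::SurfaceCapabilitiesPresentBarrierNV>(
        vk::PhysicalDeviceSurfaceInfo2KHR{}.setSurface( surface ) );

      if ( caps.get<vk::SurfaceCapabilitiesPresentBarrierNV>().presentBarrierSupported )
      {
        barrierInfo.setPresentBarrierEnable( VK_TRUE );
        swapchainCreateInfo.setPNext( &barrierInfo );  // assumes an otherwise empty pNext chain
      }
    }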
struct TextureLODGatherFormatPropertiesAMD
{
using NativeType = VkTextureLODGatherFormatPropertiesAMD;
@@ -90048,7 +94607,7 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int8_t slotIndex_ = {},
+ VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {},
const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
@@ -90080,7 +94639,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
{
slotIndex = slotIndex_;
return *this;
@@ -90110,7 +94669,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- int8_t const &,
+ int32_t const &,
const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
@@ -90140,7 +94699,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR;
const void * pNext = {};
- int8_t slotIndex = {};
+ int32_t slotIndex = {};
const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {};
};
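The slotIndex widening from int8_t to int32_t is the only behavioural change to VideoReferenceSlotInfoKHR above; the signed, wider type leaves room for the negative "no DPB slot" sentinel. A minimal sketch (not part of this diff, and only meaningful when the headers are built with VK_ENABLE_BETA_EXTENSIONS, since the video-queue types are still provisional at this version):

    #if defined( VK_ENABLE_BETA_EXTENSIONS )
    #include <vulkan/vulkan.hpp>

    // Build a reference-slot info with no DPB slot bound (slotIndex of -1).
    inline vk::VideoReferenceSlotInfoKHR makeUnboundReferenceSlot()
    {
      return vk::VideoReferenceSlotInfoKHR{}
        .setSlotIndex( -1 )               // now int32_t; previously int8_t
        .setPPictureResource( nullptr );
    }
    #endif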
diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp
index d445871..24fa9c2 100644
--- a/include/vulkan/vulkan_to_string.hpp
+++ b/include/vulkan/vulkan_to_string.hpp
@@ -276,6 +276,8 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & QueueFlagBits::eVideoEncodeKHR )
result += "VideoEncodeKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & QueueFlagBits::eOpticalFlowNV )
+ result += "OpticalFlowNV | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -610,6 +612,10 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR )
result += "VideoEncodeSrcKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT )
+ result += "MicromapBuildInputReadOnlyEXT | ";
+ if ( value & BufferUsageFlagBits::eMicromapStorageEXT )
+ result += "MicromapStorageEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -752,6 +758,12 @@ namespace VULKAN_HPP_NAMESPACE
result += "ColorAttachmentFeedbackLoopEXT | ";
if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT )
result += "DepthStencilAttachmentFeedbackLoopEXT | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT )
+ result += "RayTracingOpacityMicromapEXT | ";
+ if ( value & PipelineCreateFlagBits::eNoProtectedAccessEXT )
+ result += "NoProtectedAccessEXT | ";
+ if ( value & PipelineCreateFlagBits::eProtectedAccessOnlyEXT )
+ result += "ProtectedAccessOnlyEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -1559,6 +1571,10 @@ namespace VULKAN_HPP_NAMESPACE
result += "InvocationMaskHUAWEI | ";
if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR )
result += "AccelerationStructureCopyKHR | ";
+ if ( value & PipelineStageFlagBits2::eMicromapBuildEXT )
+ result += "MicromapBuildEXT | ";
+ if ( value & PipelineStageFlagBits2::eOpticalFlowNV )
+ result += "OpticalFlowNV | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -1645,6 +1661,14 @@ namespace VULKAN_HPP_NAMESPACE
result += "InvocationMaskReadHUAWEI | ";
if ( value & AccessFlagBits2::eShaderBindingTableReadKHR )
result += "ShaderBindingTableReadKHR | ";
+ if ( value & AccessFlagBits2::eMicromapReadEXT )
+ result += "MicromapReadEXT | ";
+ if ( value & AccessFlagBits2::eMicromapWriteEXT )
+ result += "MicromapWriteEXT | ";
+ if ( value & AccessFlagBits2::eOpticalFlowReadNV )
+ result += "OpticalFlowReadNV | ";
+ if ( value & AccessFlagBits2::eOpticalFlowWriteNV )
+ result += "OpticalFlowWriteNV | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -1767,6 +1791,12 @@ namespace VULKAN_HPP_NAMESPACE
result += "BlockMatchingQCOM | ";
if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM )
result += "BoxFilterSampledQCOM | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV )
+ result += "OpticalFlowImageNV | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV )
+ result += "OpticalFlowVectorNV | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV )
+ result += "OpticalFlowCostNV | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -2542,6 +2572,8 @@ namespace VULKAN_HPP_NAMESPACE
result += "Validation | ";
if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
result += "Performance | ";
+ if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding )
+ result += "DeviceAddressBinding | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -2593,6 +2625,10 @@ namespace VULKAN_HPP_NAMESPACE
result += "ForceOpaque | ";
if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque )
result += "ForceNoOpaque | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT )
+ result += "ForceOpacityMicromap2StateEXT | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT )
+ result += "DisableOpacityMicromapsEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -2615,6 +2651,12 @@ namespace VULKAN_HPP_NAMESPACE
result += "LowMemory | ";
if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV )
result += "MotionNV | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT )
+ result += "AllowOpacityMicromapUpdateEXT | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT )
+ result += "AllowDisableOpacityMicromapsEXT | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT )
+ result += "AllowOpacityMicromapDataUpdateEXT | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
@@ -2963,6 +3005,20 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ //=== VK_EXT_device_address_binding_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject )
+ result += "InternalObject | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -3001,6 +3057,108 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace )
+ result += "PreferFastTrace | ";
+ if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild )
+ result += "PreferFastBuild | ";
+ if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction )
+ result += "AllowCompaction | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay )
+ result += "DeviceAddressCaptureReplay | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowUsageFlagBitsNV::eInput )
+ result += "Input | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eOutput )
+ result += "Output | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eHint )
+ result += "Hint | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eCost )
+ result += "Cost | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow )
+ result += "GlobalFlow | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e1X1 )
+ result += "1X1 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 )
+ result += "2X2 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 )
+ result += "4X4 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 )
+ result += "8X8 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint )
+ result += "EnableHint | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost )
+ result += "EnableCost | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow )
+ result += "EnableGlobalFlow | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions )
+ result += "AllowRegions | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections )
+ result += "BothDirections | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints )
+ result += "DisableTemporalHints | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
//=======================
//=== ENUMs to_string ===
//=======================
@@ -3681,6 +3839,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: return "PhysicalDeviceCustomBorderColorPropertiesEXT";
case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: return "PhysicalDeviceCustomBorderColorFeaturesEXT";
case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR";
+ case StructureType::ePhysicalDevicePresentBarrierFeaturesNV: return "PhysicalDevicePresentBarrierFeaturesNV";
+ case StructureType::eSurfaceCapabilitiesPresentBarrierNV: return "SurfaceCapabilitiesPresentBarrierNV";
+ case StructureType::eSwapchainPresentBarrierCreateInfoNV: return "SwapchainPresentBarrierCreateInfoNV";
case StructureType::ePresentIdKHR: return "PresentIdKHR";
case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -3735,6 +3896,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT";
case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
+ case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT";
+ case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT";
+ case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT";
case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT";
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
@@ -3743,6 +3907,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT";
+ case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT: return "PhysicalDeviceAddressBindingReportFeaturesEXT";
+ case StructureType::eDeviceAddressBindingCallbackDataEXT: return "DeviceAddressBindingCallbackDataEXT";
case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT";
case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT";
case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT";
@@ -3787,6 +3953,16 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
+ case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT";
+ case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT";
+ case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT";
+ case StructureType::eCopyMicromapToMemoryInfoEXT: return "CopyMicromapToMemoryInfoEXT";
+ case StructureType::eCopyMemoryToMicromapInfoEXT: return "CopyMemoryToMicromapInfoEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT: return "PhysicalDeviceOpacityMicromapFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT: return "PhysicalDeviceOpacityMicromapPropertiesEXT";
+ case StructureType::eMicromapCreateInfoEXT: return "MicromapCreateInfoEXT";
+ case StructureType::eMicromapBuildSizesInfoEXT: return "MicromapBuildSizesInfoEXT";
+ case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT: return "AccelerationStructureTrianglesOpacityMicromapEXT";
case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: return "PhysicalDeviceBorderColorSwizzleFeaturesEXT";
case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT";
case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
@@ -3803,6 +3979,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM";
case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM: return "PhysicalDeviceImageProcessingPropertiesQCOM";
case StructureType::eImageViewSampleWeightCreateInfoQCOM: return "ImageViewSampleWeightCreateInfoQCOM";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT: return "PhysicalDeviceExtendedDynamicState3FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT: return "PhysicalDeviceExtendedDynamicState3PropertiesEXT";
case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT: return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT";
case StructureType::eRenderPassCreationControlEXT: return "RenderPassCreationControlEXT";
case StructureType::eRenderPassCreationFeedbackCreateInfoEXT: return "RenderPassCreationFeedbackCreateInfoEXT";
@@ -3812,7 +3990,15 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT";
case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT";
case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV";
+ case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV";
+ case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV";
+ case StructureType::eOpticalFlowImageFormatPropertiesNV: return "OpticalFlowImageFormatPropertiesNV";
+ case StructureType::eOpticalFlowSessionCreateInfoNV: return "OpticalFlowSessionCreateInfoNV";
+ case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV";
+ case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM";
case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
@@ -3886,6 +4072,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_FUCHSIA )
case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ case ObjectType::eMicromapEXT: return "MicromapEXT";
+ case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4155,6 +4343,7 @@ namespace VULKAN_HPP_NAMESPACE
case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
+ case Format::eR16G16S105NV: return "R16G16S105NV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4354,6 +4543,7 @@ namespace VULKAN_HPP_NAMESPACE
case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR";
case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case QueueFlagBits::eOpticalFlowNV: return "OpticalFlowNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4553,6 +4743,8 @@ namespace VULKAN_HPP_NAMESPACE
case QueryType::ePrimitivesGeneratedEXT: return "PrimitivesGeneratedEXT";
case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR: return "AccelerationStructureSerializationBottomLevelPointersKHR";
case QueryType::eAccelerationStructureSizeKHR: return "AccelerationStructureSizeKHR";
+ case QueryType::eMicromapSerializationSizeEXT: return "MicromapSerializationSizeEXT";
+ case QueryType::eMicromapCompactedSizeEXT: return "MicromapCompactedSizeEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4603,6 +4795,8 @@ namespace VULKAN_HPP_NAMESPACE
case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT";
+ case BufferUsageFlagBits::eMicromapStorageEXT: return "MicromapStorageEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4874,6 +5068,37 @@ namespace VULKAN_HPP_NAMESPACE
case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT";
case DynamicState::eLogicOpEXT: return "LogicOpEXT";
case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT";
+ case DynamicState::eTessellationDomainOriginEXT: return "TessellationDomainOriginEXT";
+ case DynamicState::eDepthClampEnableEXT: return "DepthClampEnableEXT";
+ case DynamicState::ePolygonModeEXT: return "PolygonModeEXT";
+ case DynamicState::eRasterizationSamplesEXT: return "RasterizationSamplesEXT";
+ case DynamicState::eSampleMaskEXT: return "SampleMaskEXT";
+ case DynamicState::eAlphaToCoverageEnableEXT: return "AlphaToCoverageEnableEXT";
+ case DynamicState::eAlphaToOneEnableEXT: return "AlphaToOneEnableEXT";
+ case DynamicState::eLogicOpEnableEXT: return "LogicOpEnableEXT";
+ case DynamicState::eColorBlendEnableEXT: return "ColorBlendEnableEXT";
+ case DynamicState::eColorBlendEquationEXT: return "ColorBlendEquationEXT";
+ case DynamicState::eColorWriteMaskEXT: return "ColorWriteMaskEXT";
+ case DynamicState::eRasterizationStreamEXT: return "RasterizationStreamEXT";
+ case DynamicState::eConservativeRasterizationModeEXT: return "ConservativeRasterizationModeEXT";
+ case DynamicState::eExtraPrimitiveOverestimationSizeEXT: return "ExtraPrimitiveOverestimationSizeEXT";
+ case DynamicState::eDepthClipEnableEXT: return "DepthClipEnableEXT";
+ case DynamicState::eSampleLocationsEnableEXT: return "SampleLocationsEnableEXT";
+ case DynamicState::eColorBlendAdvancedEXT: return "ColorBlendAdvancedEXT";
+ case DynamicState::eProvokingVertexModeEXT: return "ProvokingVertexModeEXT";
+ case DynamicState::eLineRasterizationModeEXT: return "LineRasterizationModeEXT";
+ case DynamicState::eLineStippleEnableEXT: return "LineStippleEnableEXT";
+ case DynamicState::eDepthClipNegativeOneToOneEXT: return "DepthClipNegativeOneToOneEXT";
+ case DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV";
+ case DynamicState::eViewportSwizzleNV: return "ViewportSwizzleNV";
+ case DynamicState::eCoverageToColorEnableNV: return "CoverageToColorEnableNV";
+ case DynamicState::eCoverageToColorLocationNV: return "CoverageToColorLocationNV";
+ case DynamicState::eCoverageModulationModeNV: return "CoverageModulationModeNV";
+ case DynamicState::eCoverageModulationTableEnableNV: return "CoverageModulationTableEnableNV";
+ case DynamicState::eCoverageModulationTableNV: return "CoverageModulationTableNV";
+ case DynamicState::eShadingRateImageEnableNV: return "ShadingRateImageEnableNV";
+ case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV";
+ case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4942,6 +5167,9 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV";
case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT";
case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT";
+ case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT";
+ case PipelineCreateFlagBits::eNoProtectedAccessEXT: return "NoProtectedAccessEXT";
+ case PipelineCreateFlagBits::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5809,6 +6037,8 @@ namespace VULKAN_HPP_NAMESPACE
case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR";
+ case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT";
+ case PipelineStageFlagBits2::eOpticalFlowNV: return "OpticalFlowNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5857,6 +6087,10 @@ namespace VULKAN_HPP_NAMESPACE
case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI";
case AccessFlagBits2::eShaderBindingTableReadKHR: return "ShaderBindingTableReadKHR";
+ case AccessFlagBits2::eMicromapReadEXT: return "MicromapReadEXT";
+ case AccessFlagBits2::eMicromapWriteEXT: return "MicromapWriteEXT";
+ case AccessFlagBits2::eOpticalFlowReadNV: return "OpticalFlowReadNV";
+ case AccessFlagBits2::eOpticalFlowWriteNV: return "OpticalFlowWriteNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5930,6 +6164,9 @@ namespace VULKAN_HPP_NAMESPACE
case FormatFeatureFlagBits2::eWeightSampledImageQCOM: return "WeightSampledImageQCOM";
case FormatFeatureFlagBits2::eBlockMatchingQCOM: return "BlockMatchingQCOM";
case FormatFeatureFlagBits2::eBoxFilterSampledQCOM: return "BoxFilterSampledQCOM";
+ case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV";
+ case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV";
+ case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -6804,6 +7041,7 @@ namespace VULKAN_HPP_NAMESPACE
case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
+ case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding: return "DeviceAddressBinding";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -6880,6 +7118,8 @@ namespace VULKAN_HPP_NAMESPACE
case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing";
case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque";
case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque";
+ case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT: return "ForceOpacityMicromap2StateEXT";
+ case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT: return "DisableOpacityMicromapsEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -6894,6 +7134,9 @@ namespace VULKAN_HPP_NAMESPACE
case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild";
case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory";
case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT: return "AllowOpacityMicromapUpdateEXT";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT: return "AllowDisableOpacityMicromapsEXT";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT: return "AllowOpacityMicromapDataUpdateEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -7617,6 +7860,32 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceFaultAddressTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceFaultAddressTypeEXT::eNone: return "None";
+ case DeviceFaultAddressTypeEXT::eReadInvalid: return "ReadInvalid";
+ case DeviceFaultAddressTypeEXT::eWriteInvalid: return "WriteInvalid";
+ case DeviceFaultAddressTypeEXT::eExecuteInvalid: return "ExecuteInvalid";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown: return "InstructionPointerUnknown";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid: return "InstructionPointerInvalid";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerFault: return "InstructionPointerFault";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceFaultVendorBinaryHeaderVersionEXT::eOne: return "One";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
@@ -7651,6 +7920,27 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ //=== VK_EXT_device_address_binding_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceAddressBindingFlagBitsEXT::eInternalObject: return "InternalObject";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceAddressBindingTypeEXT::eBind: return "Bind";
+ case DeviceAddressBindingTypeEXT::eUnbind: return "Unbind";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -7682,6 +7972,80 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapTypeEXT value )
+ {
+ switch ( value )
+ {
+ case MicromapTypeEXT::eOpacityMicromap: return "OpacityMicromap";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case BuildMicromapFlagBitsEXT::ePreferFastTrace: return "PreferFastTrace";
+ case BuildMicromapFlagBitsEXT::ePreferFastBuild: return "PreferFastBuild";
+ case BuildMicromapFlagBitsEXT::eAllowCompaction: return "AllowCompaction";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CopyMicromapModeEXT value )
+ {
+ switch ( value )
+ {
+ case CopyMicromapModeEXT::eClone: return "Clone";
+ case CopyMicromapModeEXT::eSerialize: return "Serialize";
+ case CopyMicromapModeEXT::eDeserialize: return "Deserialize";
+ case CopyMicromapModeEXT::eCompact: return "Compact";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapModeEXT value )
+ {
+ switch ( value )
+ {
+ case BuildMicromapModeEXT::eBuild: return "Build";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpacityMicromapFormatEXT value )
+ {
+ switch ( value )
+ {
+ case OpacityMicromapFormatEXT::e2State: return "2State";
+ case OpacityMicromapFormatEXT::e4State: return "4State";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpacityMicromapSpecialIndexEXT value )
+ {
+ switch ( value )
+ {
+ case OpacityMicromapSpecialIndexEXT::eFullyTransparent: return "FullyTransparent";
+ case OpacityMicromapSpecialIndexEXT::eFullyOpaque: return "FullyOpaque";
+ case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent: return "FullyUnknownTransparent";
+ case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque: return "FullyUnknownOpaque";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
//=== VK_EXT_subpass_merge_feedback ===
VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value )
@@ -7727,5 +8091,85 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowUsageFlagBitsNV::eUnknown: return "Unknown";
+ case OpticalFlowUsageFlagBitsNV::eInput: return "Input";
+ case OpticalFlowUsageFlagBitsNV::eOutput: return "Output";
+ case OpticalFlowUsageFlagBitsNV::eHint: return "Hint";
+ case OpticalFlowUsageFlagBitsNV::eCost: return "Cost";
+ case OpticalFlowUsageFlagBitsNV::eGlobalFlow: return "GlobalFlow";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowGridSizeFlagBitsNV::eUnknown: return "Unknown";
+ case OpticalFlowGridSizeFlagBitsNV::e1X1: return "1X1";
+ case OpticalFlowGridSizeFlagBitsNV::e2X2: return "2X2";
+ case OpticalFlowGridSizeFlagBitsNV::e4X4: return "4X4";
+ case OpticalFlowGridSizeFlagBitsNV::e8X8: return "8X8";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowPerformanceLevelNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowPerformanceLevelNV::eUnknown: return "Unknown";
+ case OpticalFlowPerformanceLevelNV::eSlow: return "Slow";
+ case OpticalFlowPerformanceLevelNV::eMedium: return "Medium";
+ case OpticalFlowPerformanceLevelNV::eFast: return "Fast";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionBindingPointNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowSessionBindingPointNV::eUnknown: return "Unknown";
+ case OpticalFlowSessionBindingPointNV::eInput: return "Input";
+ case OpticalFlowSessionBindingPointNV::eReference: return "Reference";
+ case OpticalFlowSessionBindingPointNV::eHint: return "Hint";
+ case OpticalFlowSessionBindingPointNV::eFlowVector: return "FlowVector";
+ case OpticalFlowSessionBindingPointNV::eBackwardFlowVector: return "BackwardFlowVector";
+ case OpticalFlowSessionBindingPointNV::eCost: return "Cost";
+ case OpticalFlowSessionBindingPointNV::eBackwardCost: return "BackwardCost";
+ case OpticalFlowSessionBindingPointNV::eGlobalFlow: return "GlobalFlow";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableHint: return "EnableHint";
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableCost: return "EnableCost";
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow: return "EnableGlobalFlow";
+ case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions: return "AllowRegions";
+ case OpticalFlowSessionCreateFlagBitsNV::eBothDirections: return "BothDirections";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints: return "DisableTemporalHints";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
} // namespace VULKAN_HPP_NAMESPACE
#endif
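
For a quick sanity check of the to_string overloads added in this diff, here is a minimal, self-contained usage sketch (illustrative only, not part of the generated headers). It assumes this header set is on the include path and that `vk` is the default VULKAN_HPP_NAMESPACE; whether <vulkan/vulkan.hpp> already pulls in <vulkan/vulkan_to_string.hpp> is version-dependent, so the sketch includes it explicitly, which is harmless thanks to the include guards.

// Illustrative sketch: exercises the new flag and enum to_string overloads shown above.
#include <iostream>
#include <vulkan/vulkan.hpp>            // defines the VULKAN_HPP_NAMESPACE macros and the enum/flag types
#include <vulkan/vulkan_to_string.hpp>  // to_string overloads (guarded; explicit include assumed safe)

int main()
{
  // Bitmask overloads list each set bit separated by " | " inside braces,
  // matching the "result.substr( 0, result.size() - 3 )" trimming in the diff.
  vk::BuildMicromapFlagsEXT buildFlags =
    vk::BuildMicromapFlagBitsEXT::ePreferFastTrace | vk::BuildMicromapFlagBitsEXT::eAllowCompaction;
  std::cout << vk::to_string( buildFlags ) << '\n';  // prints "{ PreferFastTrace | AllowCompaction }"

  // Plain enum overloads return the enumerant name, or "invalid ( 0x... )" for unknown values.
  std::cout << vk::to_string( vk::CopyMicromapModeEXT::eCompact ) << '\n';  // prints "Compact"

  return 0;
}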