diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 197d9ca..ca1df90 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -625,6 +625,13 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagBitsEXT; using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT; + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceEXT; + using VULKAN_HPP_NAMESPACE::SpirvResourceTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::SpirvResourceTypeFlagsEXT; + using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagBitsARM; + using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM; + //=== VK_EXT_blend_operation_advanced === using VULKAN_HPP_NAMESPACE::BlendOverlapEXT; @@ -890,8 +897,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::TensorTilingARM; using VULKAN_HPP_NAMESPACE::TensorUsageFlagBitsARM; using VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM; - using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagBitsARM; - using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM; //=== VK_NV_optical_flow === using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagBitsNV; @@ -1647,6 +1652,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_NAMESPACE::EXTDescriptorHeapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDescriptorHeapSpecVersion; + //=== VK_AMD_mixed_attachment_samples === using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesExtensionName; using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesSpecVersion; @@ -1779,6 +1788,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveExtensionName; using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveSpecVersion; + //=== VK_QCOM_cooperative_matrix_conversion === + using VULKAN_HPP_NAMESPACE::QCOMCooperativeMatrixConversionExtensionName; + 
using VULKAN_HPP_NAMESPACE::QCOMCooperativeMatrixConversionSpecVersion; + //=== VK_EXT_global_priority === using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityExtensionName; using VULKAN_HPP_NAMESPACE::EXTGlobalPrioritySpecVersion; @@ -2704,6 +2717,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsExtensionName; using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsSpecVersion; + //=== VK_KHR_internally_synchronized_queues === + using VULKAN_HPP_NAMESPACE::KHRInternallySynchronizedQueuesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRInternallySynchronizedQueuesSpecVersion; + //=== VK_NV_low_latency2 === using VULKAN_HPP_NAMESPACE::NVLowLatency2ExtensionName; using VULKAN_HPP_NAMESPACE::NVLowLatency2SpecVersion; @@ -2902,6 +2919,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::KHRShaderFmaExtensionName; using VULKAN_HPP_NAMESPACE::KHRShaderFmaSpecVersion; + //=== VK_NV_push_constant_bank === + using VULKAN_HPP_NAMESPACE::NVPushConstantBankExtensionName; + using VULKAN_HPP_NAMESPACE::NVPushConstantBankSpecVersion; + //=== VK_EXT_ray_tracing_invocation_reorder === using VULKAN_HPP_NAMESPACE::EXTRayTracingInvocationReorderExtensionName; using VULKAN_HPP_NAMESPACE::EXTRayTracingInvocationReorderSpecVersion; @@ -3020,6 +3041,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVComputeOccupancyPriorityExtensionName; using VULKAN_HPP_NAMESPACE::NVComputeOccupancyPrioritySpecVersion; + //=== VK_EXT_shader_subgroup_partitioned === + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupPartitionedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupPartitionedSpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== @@ -4005,6 +4030,36 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + using 
VULKAN_HPP_NAMESPACE::BindHeapInfoEXT; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceDescriptorHeapInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceConstantOffsetEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceDataEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceHeapDataEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectAddressEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexArrayEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourcePushIndexEXT; + using VULKAN_HPP_NAMESPACE::DescriptorMappingSourceShaderRecordIndexEXT; + using VULKAN_HPP_NAMESPACE::DescriptorSetAndBindingMappingEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressRangeEXT; + using VULKAN_HPP_NAMESPACE::HostAddressRangeConstEXT; + using VULKAN_HPP_NAMESPACE::HostAddressRangeEXT; + using VULKAN_HPP_NAMESPACE::ImageDescriptorInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutPushDataTokenNV; + using VULKAN_HPP_NAMESPACE::OpaqueCaptureDataCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapTensorPropertiesARM; + using VULKAN_HPP_NAMESPACE::PushDataInfoEXT; + using VULKAN_HPP_NAMESPACE::ResourceDescriptorDataEXT; + using VULKAN_HPP_NAMESPACE::ResourceDescriptorInfoEXT; + using VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorIndexCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ShaderDescriptorSetAndBindingMappingInfoEXT; + using VULKAN_HPP_NAMESPACE::SubsampledImageFormatPropertiesEXT; + using VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM; + using VULKAN_HPP_NAMESPACE::TexelBufferDescriptorInfoEXT; + //=== VK_AMD_mixed_attachment_samples === using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; @@ -4127,6 +4182,9 @@ export namespace 
VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT; + //=== VK_QCOM_cooperative_matrix_conversion === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + //=== VK_EXT_external_memory_host === using VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT; using VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT; @@ -4793,7 +4851,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM; using VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM; using VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM; - using VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM; using VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM; //=== VK_EXT_shader_module_identifier === @@ -4941,6 +4998,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_dynamic_rendering_unused_attachments === using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + //=== VK_KHR_internally_synchronized_queues === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; + //=== VK_NV_low_latency2 === using VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV; using VULKAN_HPP_NAMESPACE::LatencySleepInfoNV; @@ -5224,6 +5284,11 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_shader_fma === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFmaFeaturesKHR; + //=== VK_NV_push_constant_bank === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankPropertiesNV; + using VULKAN_HPP_NAMESPACE::PushConstantBankInfoNV; + //=== VK_EXT_ray_tracing_invocation_reorder === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesEXT; @@ -5352,6 +5417,9 @@ export namespace VULKAN_HPP_NAMESPACE using 
VULKAN_HPP_NAMESPACE::ComputeOccupancyPriorityParametersNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeOccupancyPriorityFeaturesNV; + //=== VK_EXT_shader_subgroup_partitioned === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT; + //=============== //=== HANDLEs === //=============== @@ -5416,6 +5484,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_NAMESPACE::TensorARM; + //=== VK_KHR_acceleration_structure === using VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; @@ -5532,6 +5603,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === using VULKAN_HPP_NAMESPACE::UniqueDebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_NAMESPACE::UniqueTensorARM; + //=== VK_KHR_acceleration_structure === using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureKHR; @@ -5653,6 +5727,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === using VULKAN_HPP_NAMESPACE::SharedDebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_NAMESPACE::SharedTensorARM; + //=== VK_KHR_acceleration_structure === using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureKHR; @@ -5865,6 +5942,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === using VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + using VULKAN_HPP_RAII_NAMESPACE::TensorARM; + //=== VK_KHR_acceleration_structure === using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR; @@ -6036,6 +6116,10 @@ export namespace std template <> struct hash; + //=== VK_EXT_descriptor_heap === + template <> + struct hash; + //=== VK_KHR_acceleration_structure === template <> struct hash; @@ -7362,6 +7446,64 @@ export namespace std struct hash; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + template <> + struct hash; + template <> + 
struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + //=== VK_AMD_mixed_attachment_samples === template <> struct hash; @@ -7562,6 +7704,10 @@ export namespace std template <> struct hash; + //=== VK_QCOM_cooperative_matrix_conversion === + template <> + struct hash; + //=== VK_EXT_external_memory_host === template <> struct hash; @@ -8826,6 +8972,10 @@ export namespace std template <> struct hash; + //=== VK_KHR_internally_synchronized_queues === + template <> + struct hash; + //=== VK_NV_low_latency2 === template <> struct hash; @@ -9292,6 +9442,14 @@ export namespace std template <> struct hash; + //=== VK_NV_push_constant_bank === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + //=== VK_EXT_ray_tracing_invocation_reorder === template <> struct hash; @@ -9472,6 +9630,10 @@ export namespace std template <> struct hash; + //=== VK_EXT_shader_subgroup_partitioned === + template <> + struct hash; + //================================================================= //=== Required exports for VULKAN_HPP_NAMESPACE::StructureChain === //================================================================= @@ -9840,6 +10002,7 @@ export using ::PFN_vkDestroyCuModuleNVX; //=== VK_NVX_image_view_handle === + using 
::PFN_vkGetDeviceCombinedImageSamplerIndexNVX; using ::PFN_vkGetImageViewAddressNVX; using ::PFN_vkGetImageViewHandle64NVX; using ::PFN_vkGetImageViewHandleNVX; @@ -10043,6 +10206,18 @@ export using ::PFN_vkGetExecutionGraphPipelineScratchSizeAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + using ::PFN_vkCmdBindResourceHeapEXT; + using ::PFN_vkCmdBindSamplerHeapEXT; + using ::PFN_vkCmdPushDataEXT; + using ::PFN_vkGetImageOpaqueCaptureDataEXT; + using ::PFN_vkGetPhysicalDeviceDescriptorSizeEXT; + using ::PFN_vkGetTensorOpaqueCaptureDataARM; + using ::PFN_vkRegisterCustomBorderColorEXT; + using ::PFN_vkUnregisterCustomBorderColorEXT; + using ::PFN_vkWriteResourceDescriptorsEXT; + using ::PFN_vkWriteSamplerDescriptorsEXT; + //=== VK_EXT_sample_locations === using ::PFN_vkCmdSetSampleLocationsEXT; using ::PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT; diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index ce2df09..215a43f 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -57,7 +57,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # endif #endif -VULKAN_HPP_STATIC_ASSERT( VK_HEADER_VERSION == 339, "Wrong VK_HEADER_VERSION!" ); +VULKAN_HPP_STATIC_ASSERT( VK_HEADER_VERSION == 342, "Wrong VK_HEADER_VERSION!" 
); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -1098,6 +1098,8 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkCmdBindInvocationMaskHUAWEI ) DECLARE_IS_DISPATCHED( vkCmdBindPipeline ) DECLARE_IS_DISPATCHED( vkCmdBindPipelineShaderGroupNV ) + DECLARE_IS_DISPATCHED( vkCmdBindResourceHeapEXT ) + DECLARE_IS_DISPATCHED( vkCmdBindSamplerHeapEXT ) DECLARE_IS_DISPATCHED( vkCmdBindShadersEXT ) DECLARE_IS_DISPATCHED( vkCmdBindShadingRateImageNV ) DECLARE_IS_DISPATCHED( vkCmdBindTileMemoryQCOM ) @@ -1217,6 +1219,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkCmdPushConstants ) DECLARE_IS_DISPATCHED( vkCmdPushConstants2 ) DECLARE_IS_DISPATCHED( vkCmdPushConstants2KHR ) + DECLARE_IS_DISPATCHED( vkCmdPushDataEXT ) DECLARE_IS_DISPATCHED( vkCmdPushDescriptorSet ) DECLARE_IS_DISPATCHED( vkCmdPushDescriptorSetKHR ) DECLARE_IS_DISPATCHED( vkCmdPushDescriptorSet2 ) @@ -1560,6 +1563,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkGetDeviceAccelerationStructureCompatibilityKHR ) DECLARE_IS_DISPATCHED( vkGetDeviceBufferMemoryRequirements ) DECLARE_IS_DISPATCHED( vkGetDeviceBufferMemoryRequirementsKHR ) + DECLARE_IS_DISPATCHED( vkGetDeviceCombinedImageSamplerIndexNVX ) DECLARE_IS_DISPATCHED( vkGetDeviceFaultInfoEXT ) DECLARE_IS_DISPATCHED( vkGetDeviceGroupPeerMemoryFeatures ) DECLARE_IS_DISPATCHED( vkGetDeviceGroupPeerMemoryFeaturesKHR ) @@ -1606,6 +1610,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkGetImageMemoryRequirements ) DECLARE_IS_DISPATCHED( vkGetImageMemoryRequirements2 ) DECLARE_IS_DISPATCHED( vkGetImageMemoryRequirements2KHR ) + DECLARE_IS_DISPATCHED( vkGetImageOpaqueCaptureDataEXT ) DECLARE_IS_DISPATCHED( vkGetImageOpaqueCaptureDescriptorDataEXT ) DECLARE_IS_DISPATCHED( vkGetImageSparseMemoryRequirements ) DECLARE_IS_DISPATCHED( vkGetImageSparseMemoryRequirements2 ) @@ -1646,6 +1651,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( 
vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR ) DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceCooperativeMatrixPropertiesNV ) DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceCooperativeVectorPropertiesNV ) + DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceDescriptorSizeEXT ) DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceDirectFBPresentationSupportEXT ) DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceDisplayPlaneProperties2KHR ) DECLARE_IS_DISPATCHED( vkGetPhysicalDeviceDisplayPlanePropertiesKHR ) @@ -1754,6 +1760,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkGetSwapchainTimeDomainPropertiesEXT ) DECLARE_IS_DISPATCHED( vkGetSwapchainTimingPropertiesEXT ) DECLARE_IS_DISPATCHED( vkGetTensorMemoryRequirementsARM ) + DECLARE_IS_DISPATCHED( vkGetTensorOpaqueCaptureDataARM ) DECLARE_IS_DISPATCHED( vkGetTensorOpaqueCaptureDescriptorDataARM ) DECLARE_IS_DISPATCHED( vkGetTensorViewOpaqueCaptureDescriptorDataARM ) DECLARE_IS_DISPATCHED( vkGetValidationCacheDataEXT ) @@ -1788,6 +1795,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkQueueSubmit2 ) DECLARE_IS_DISPATCHED( vkQueueSubmit2KHR ) DECLARE_IS_DISPATCHED( vkQueueWaitIdle ) + DECLARE_IS_DISPATCHED( vkRegisterCustomBorderColorEXT ) DECLARE_IS_DISPATCHED( vkRegisterDeviceEventEXT ) DECLARE_IS_DISPATCHED( vkRegisterDisplayEventEXT ) DECLARE_IS_DISPATCHED( vkReleaseCapturedPipelineDataKHR ) @@ -1828,6 +1836,7 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkUnmapMemory ) DECLARE_IS_DISPATCHED( vkUnmapMemory2 ) DECLARE_IS_DISPATCHED( vkUnmapMemory2KHR ) + DECLARE_IS_DISPATCHED( vkUnregisterCustomBorderColorEXT ) DECLARE_IS_DISPATCHED( vkUpdateDescriptorSetWithTemplate ) DECLARE_IS_DISPATCHED( vkUpdateDescriptorSetWithTemplateKHR ) DECLARE_IS_DISPATCHED( vkUpdateDescriptorSets ) @@ -1841,6 +1850,8 @@ namespace VULKAN_HPP_NAMESPACE DECLARE_IS_DISPATCHED( vkWaitSemaphoresKHR ) DECLARE_IS_DISPATCHED( vkWriteAccelerationStructuresPropertiesKHR ) DECLARE_IS_DISPATCHED( vkWriteMicromapsPropertiesEXT ) + 
DECLARE_IS_DISPATCHED( vkWriteResourceDescriptorsEXT ) + DECLARE_IS_DISPATCHED( vkWriteSamplerDescriptorsEXT ) #undef DECLARE_IS_DISPATCHED @@ -3922,6 +3933,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); } + uint64_t vkGetDeviceCombinedImageSamplerIndexNVX( VkDevice device, uint64_t imageViewIndex, uint64_t samplerIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceCombinedImageSamplerIndexNVX( device, imageViewIndex, samplerIndex ); + } + //=== VK_AMD_draw_indirect_count === void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, @@ -4688,6 +4704,73 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + VkResult vkWriteSamplerDescriptorsEXT( VkDevice device, + uint32_t samplerCount, + const VkSamplerCreateInfo * pSamplers, + const VkHostAddressRangeEXT * pDescriptors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteSamplerDescriptorsEXT( device, samplerCount, pSamplers, pDescriptors ); + } + + VkResult vkWriteResourceDescriptorsEXT( VkDevice device, + uint32_t resourceCount, + const VkResourceDescriptorInfoEXT * pResources, + const VkHostAddressRangeEXT * pDescriptors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteResourceDescriptorsEXT( device, resourceCount, pResources, pDescriptors ); + } + + void vkCmdBindSamplerHeapEXT( VkCommandBuffer commandBuffer, const VkBindHeapInfoEXT * pBindInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindSamplerHeapEXT( commandBuffer, pBindInfo ); + } + + void vkCmdBindResourceHeapEXT( VkCommandBuffer commandBuffer, const VkBindHeapInfoEXT * pBindInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindResourceHeapEXT( commandBuffer, pBindInfo ); + } + + void vkCmdPushDataEXT( VkCommandBuffer commandBuffer, const VkPushDataInfoEXT * pPushDataInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDataEXT( commandBuffer, pPushDataInfo ); + } + + VkResult vkGetImageOpaqueCaptureDataEXT( 
VkDevice device, + uint32_t imageCount, + const VkImage * pImages, + VkHostAddressRangeEXT * pDatas ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDataEXT( device, imageCount, pImages, pDatas ); + } + + VkDeviceSize vkGetPhysicalDeviceDescriptorSizeEXT( VkPhysicalDevice physicalDevice, VkDescriptorType descriptorType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDescriptorSizeEXT( physicalDevice, descriptorType ); + } + + VkResult vkRegisterCustomBorderColorEXT( VkDevice device, + const VkSamplerCustomBorderColorCreateInfoEXT * pBorderColor, + VkBool32 requestIndex, + uint32_t * pIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterCustomBorderColorEXT( device, pBorderColor, requestIndex, pIndex ); + } + + void vkUnregisterCustomBorderColorEXT( VkDevice device, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnregisterCustomBorderColorEXT( device, index ); + } + + VkResult vkGetTensorOpaqueCaptureDataARM( VkDevice device, + uint32_t tensorCount, + const VkTensorARM * pTensors, + VkHostAddressRangeEXT * pDatas ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetTensorOpaqueCaptureDataARM( device, tensorCount, pTensors, pDatas ); + } + //=== VK_EXT_sample_locations === void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT @@ -9152,6 +9235,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueExtensionName = VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorHeapSpecVersion = VK_EXT_DESCRIPTOR_HEAP_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorHeapExtensionName = VK_EXT_DESCRIPTOR_HEAP_EXTENSION_NAME; + //=== VK_AMD_mixed_attachment_samples === VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesSpecVersion = VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION; 
VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesExtensionName = VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME; @@ -9284,6 +9371,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveSpecVersion = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveExtensionName = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME; + //=== VK_QCOM_cooperative_matrix_conversion === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMCooperativeMatrixConversionSpecVersion = VK_QCOM_COOPERATIVE_MATRIX_CONVERSION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMCooperativeMatrixConversionExtensionName = VK_QCOM_COOPERATIVE_MATRIX_CONVERSION_EXTENSION_NAME; + //=== VK_EXT_global_priority === VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityExtensionName = VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME; @@ -9733,7 +9824,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2ExtensionName = VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME; //=== VK_EXT_descriptor_buffer === - VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferSpecVersion = VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION; + VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_buffer extension has been deprecated by VK_EXT_descriptor_heap." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferSpecVersion = VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION; + VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_buffer extension has been deprecated by VK_EXT_descriptor_heap." 
) VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferExtensionName = VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME; //=== VK_EXT_graphics_pipeline_library === @@ -10206,6 +10299,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsSpecVersion = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsExtensionName = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME; + //=== VK_KHR_internally_synchronized_queues === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRInternallySynchronizedQueuesSpecVersion = VK_KHR_INTERNALLY_SYNCHRONIZED_QUEUES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRInternallySynchronizedQueuesExtensionName = VK_KHR_INTERNALLY_SYNCHRONIZED_QUEUES_EXTENSION_NAME; + //=== VK_NV_low_latency2 === VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2SpecVersion = VK_NV_LOW_LATENCY_2_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2ExtensionName = VK_NV_LOW_LATENCY_2_EXTENSION_NAME; @@ -10400,6 +10497,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFmaSpecVersion = VK_KHR_SHADER_FMA_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFmaExtensionName = VK_KHR_SHADER_FMA_EXTENSION_NAME; + //=== VK_NV_push_constant_bank === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPushConstantBankSpecVersion = VK_NV_PUSH_CONSTANT_BANK_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPushConstantBankExtensionName = VK_NV_PUSH_CONSTANT_BANK_EXTENSION_NAME; + //=== VK_EXT_ray_tracing_invocation_reorder === VULKAN_HPP_CONSTEXPR_INLINE auto EXTRayTracingInvocationReorderSpecVersion = VK_EXT_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto EXTRayTracingInvocationReorderExtensionName = VK_EXT_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME; @@ -10514,6 +10615,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeOccupancyPrioritySpecVersion = 
VK_NV_COMPUTE_OCCUPANCY_PRIORITY_SPEC_VERSION; VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeOccupancyPriorityExtensionName = VK_NV_COMPUTE_OCCUPANCY_PRIORITY_EXTENSION_NAME; + //=== VK_EXT_shader_subgroup_partitioned === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupPartitionedSpecVersion = VK_EXT_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupPartitionedExtensionName = VK_EXT_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -13609,6 +13714,24 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === template <> @@ -13732,6 +13855,115 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_AMD_mixed_attachment_samples === 
template <> struct StructExtends @@ -14192,6 +14424,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_QCOM_cooperative_matrix_conversion === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_external_memory_host === template <> struct StructExtends @@ -18692,6 +18943,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_internally_synchronized_queues === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_low_latency2 === template <> struct StructExtends @@ -20201,6 +20471,70 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_push_constant_bank === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_ray_tracing_invocation_reorder === template <> struct StructExtends @@ -20933,6 +21267,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_shader_subgroup_partitioned === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE namespace detail @@ -21436,9 +21789,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - 
PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetDeviceCombinedImageSamplerIndexNVX vkGetDeviceCombinedImageSamplerIndexNVX = 0; //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; @@ -21672,6 +22026,18 @@ namespace VULKAN_HPP_NAMESPACE PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + PFN_vkWriteSamplerDescriptorsEXT vkWriteSamplerDescriptorsEXT = 0; + PFN_vkWriteResourceDescriptorsEXT vkWriteResourceDescriptorsEXT = 0; + PFN_vkCmdBindSamplerHeapEXT vkCmdBindSamplerHeapEXT = 0; + PFN_vkCmdBindResourceHeapEXT vkCmdBindResourceHeapEXT = 0; + PFN_vkCmdPushDataEXT vkCmdPushDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDataEXT vkGetImageOpaqueCaptureDataEXT = 0; + PFN_vkGetPhysicalDeviceDescriptorSizeEXT vkGetPhysicalDeviceDescriptorSizeEXT = 0; + PFN_vkRegisterCustomBorderColorEXT vkRegisterCustomBorderColorEXT = 0; + PFN_vkUnregisterCustomBorderColorEXT vkUnregisterCustomBorderColorEXT = 0; + PFN_vkGetTensorOpaqueCaptureDataARM vkGetTensorOpaqueCaptureDataARM = 0; + //=== VK_EXT_sample_locations === PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; @@ -22815,6 +23181,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandle64NVX" ) ); vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + 
vkGetDeviceCombinedImageSamplerIndexNVX = + PFN_vkGetDeviceCombinedImageSamplerIndexNVX( vkGetInstanceProcAddr( instance, "vkGetDeviceCombinedImageSamplerIndexNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); @@ -23100,6 +23468,19 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + vkWriteSamplerDescriptorsEXT = PFN_vkWriteSamplerDescriptorsEXT( vkGetInstanceProcAddr( instance, "vkWriteSamplerDescriptorsEXT" ) ); + vkWriteResourceDescriptorsEXT = PFN_vkWriteResourceDescriptorsEXT( vkGetInstanceProcAddr( instance, "vkWriteResourceDescriptorsEXT" ) ); + vkCmdBindSamplerHeapEXT = PFN_vkCmdBindSamplerHeapEXT( vkGetInstanceProcAddr( instance, "vkCmdBindSamplerHeapEXT" ) ); + vkCmdBindResourceHeapEXT = PFN_vkCmdBindResourceHeapEXT( vkGetInstanceProcAddr( instance, "vkCmdBindResourceHeapEXT" ) ); + vkCmdPushDataEXT = PFN_vkCmdPushDataEXT( vkGetInstanceProcAddr( instance, "vkCmdPushDataEXT" ) ); + vkGetImageOpaqueCaptureDataEXT = PFN_vkGetImageOpaqueCaptureDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDataEXT" ) ); + vkGetPhysicalDeviceDescriptorSizeEXT = + PFN_vkGetPhysicalDeviceDescriptorSizeEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDescriptorSizeEXT" ) ); + vkRegisterCustomBorderColorEXT = PFN_vkRegisterCustomBorderColorEXT( vkGetInstanceProcAddr( instance, "vkRegisterCustomBorderColorEXT" ) ); + vkUnregisterCustomBorderColorEXT = PFN_vkUnregisterCustomBorderColorEXT( vkGetInstanceProcAddr( instance, "vkUnregisterCustomBorderColorEXT" ) ); + vkGetTensorOpaqueCaptureDataARM = PFN_vkGetTensorOpaqueCaptureDataARM( vkGetInstanceProcAddr( instance, "vkGetTensorOpaqueCaptureDataARM" ) ); + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = 
PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); vkGetPhysicalDeviceMultisamplePropertiesEXT = @@ -24290,6 +24671,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetDeviceCombinedImageSamplerIndexNVX = + PFN_vkGetDeviceCombinedImageSamplerIndexNVX( vkGetDeviceProcAddr( device, "vkGetDeviceCombinedImageSamplerIndexNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); @@ -24462,6 +24845,17 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + vkWriteSamplerDescriptorsEXT = PFN_vkWriteSamplerDescriptorsEXT( vkGetDeviceProcAddr( device, "vkWriteSamplerDescriptorsEXT" ) ); + vkWriteResourceDescriptorsEXT = PFN_vkWriteResourceDescriptorsEXT( vkGetDeviceProcAddr( device, "vkWriteResourceDescriptorsEXT" ) ); + vkCmdBindSamplerHeapEXT = PFN_vkCmdBindSamplerHeapEXT( vkGetDeviceProcAddr( device, "vkCmdBindSamplerHeapEXT" ) ); + vkCmdBindResourceHeapEXT = PFN_vkCmdBindResourceHeapEXT( vkGetDeviceProcAddr( device, "vkCmdBindResourceHeapEXT" ) ); + vkCmdPushDataEXT = PFN_vkCmdPushDataEXT( vkGetDeviceProcAddr( device, "vkCmdPushDataEXT" ) ); + vkGetImageOpaqueCaptureDataEXT = PFN_vkGetImageOpaqueCaptureDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDataEXT" ) ); + vkRegisterCustomBorderColorEXT = PFN_vkRegisterCustomBorderColorEXT( vkGetDeviceProcAddr( device, 
"vkRegisterCustomBorderColorEXT" ) ); + vkUnregisterCustomBorderColorEXT = PFN_vkUnregisterCustomBorderColorEXT( vkGetDeviceProcAddr( device, "vkUnregisterCustomBorderColorEXT" ) ); + vkGetTensorOpaqueCaptureDataARM = PFN_vkGetTensorOpaqueCaptureDataARM( vkGetDeviceProcAddr( device, "vkGetTensorOpaqueCaptureDataARM" ) ); + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index fc06cde..edeb412 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -66,7 +66,7 @@ extern "C" { //#define VK_API_VERSION VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 339 +#define VK_HEADER_VERSION 342 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION) @@ -655,6 +655,21 @@ typedef enum VkStructureType { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, #endif + VK_STRUCTURE_TYPE_TEXEL_BUFFER_DESCRIPTOR_INFO_EXT = 1000135000, + VK_STRUCTURE_TYPE_IMAGE_DESCRIPTOR_INFO_EXT = 1000135001, + VK_STRUCTURE_TYPE_RESOURCE_DESCRIPTOR_INFO_EXT = 1000135002, + VK_STRUCTURE_TYPE_BIND_HEAP_INFO_EXT = 1000135003, + VK_STRUCTURE_TYPE_PUSH_DATA_INFO_EXT = 1000135004, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_AND_BINDING_MAPPING_EXT = 1000135005, + VK_STRUCTURE_TYPE_SHADER_DESCRIPTOR_SET_AND_BINDING_MAPPING_INFO_EXT = 1000135006, + VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DATA_CREATE_INFO_EXT = 1000135007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_PROPERTIES_EXT = 1000135008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_FEATURES_EXT = 1000135009, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_DESCRIPTOR_HEAP_INFO_EXT = 1000135010, + 
VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_INDEX_CREATE_INFO_EXT = 1000135011, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_PUSH_DATA_TOKEN_NV = 1000135012, + VK_STRUCTURE_TYPE_SUBSAMPLED_IMAGE_FORMAT_PROPERTIES_EXT = 1000135013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_TENSOR_PROPERTIES_ARM = 1000135014, VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR = 1000141000, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, @@ -723,6 +738,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_CONVERSION_FEATURES_QCOM = 1000172000, VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, @@ -1163,6 +1179,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT = 1000498000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT = 1000499000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INTERNALLY_SYNCHRONIZED_QUEUES_FEATURES_KHR = 1000504000, VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV = 1000505000, VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV = 1000505001, VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV = 1000505002, @@ -1323,6 +1340,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FMA_FEATURES_KHR = 1000579000, + VK_STRUCTURE_TYPE_PUSH_CONSTANT_BANK_INFO_NV = 1000580000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_FEATURES_NV = 1000580001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_PROPERTIES_NV = 1000580002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_EXT = 1000581000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_EXT = 1000581001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, @@ -1385,6 +1405,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_UNIFORM_BUFFER_UNSIZED_ARRAY_FEATURES_EXT = 1000642000, VK_STRUCTURE_TYPE_COMPUTE_OCCUPANCY_PRIORITY_PARAMETERS_NV = 1000645000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_OCCUPANCY_PRIORITY_FEATURES_NV = 1000645001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_PARTITIONED_FEATURES_EXT = 1000662000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a legacy alias @@ -2712,9 +2733,9 @@ typedef enum VkImageCreateFlagBits { VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_DESCRIPTOR_HEAP_CAPTURE_REPLAY_BIT_EXT = 0x00010000, VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, - VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000, VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000, VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000, VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00100000, @@ -2725,6 +2746,7 @@ typedef enum 
VkImageCreateFlagBits { VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = VK_IMAGE_CREATE_DESCRIPTOR_HEAP_CAPTURE_REPLAY_BIT_EXT, VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT, VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageCreateFlagBits; @@ -2820,6 +2842,7 @@ typedef VkFlags VkDeviceCreateFlags; typedef enum VkDeviceQueueCreateFlagBits { VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_INTERNALLY_SYNCHRONIZED_BIT_KHR = 0x00000004, VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkDeviceQueueCreateFlagBits; typedef VkFlags VkDeviceQueueCreateFlags; @@ -2958,6 +2981,7 @@ typedef enum VkBufferUsageFlagBits { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, #endif + VK_BUFFER_USAGE_DESCRIPTOR_HEAP_BIT_EXT = 0x10000000, VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, @@ -5543,7 +5567,8 @@ typedef enum VkSubgroupFeatureFlagBits { VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, VK_SUBGROUP_FEATURE_ROTATE_BIT = 0x00000200, VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT = 0x00000400, - VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_EXT = 0x00000100, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_EXT, VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_BIT, VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -7116,6 +7141,8 @@ static const VkAccessFlagBits2 
VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000 static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SAMPLER_HEAP_READ_BIT_EXT = 0x200000000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_RESOURCE_HEAP_READ_BIT_EXT = 0x400000000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM = 0x8000000000000ULL; @@ -7830,7 +7857,7 @@ typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); @@ -8129,6 +8156,7 @@ 
static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT #ifdef VK_ENABLE_BETA_EXTENSIONS static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; #endif +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_DESCRIPTOR_HEAP_BIT_EXT = 0x10000000ULL; static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; @@ -8190,6 +8218,7 @@ static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONL #ifdef VK_ENABLE_BETA_EXTENSIONS static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; #endif +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DESCRIPTOR_HEAP_BIT_EXT = 0x1000000000ULL; static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_BUILT_IN_PRIMITIVES_BIT_KHR = 0x00001000ULL; static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV = 0x200000000ULL; static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; @@ -12243,7 +12272,7 @@ typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); 
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); #ifndef VK_NO_PROTOTYPES #ifndef VK_ONLY_EXPORTED_PROTOTYPES @@ -12927,6 +12956,18 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesKHR( #endif +// VK_KHR_internally_synchronized_queues is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_internally_synchronized_queues 1 +#define VK_KHR_INTERNALLY_SYNCHRONIZED_QUEUES_SPEC_VERSION 1 +#define VK_KHR_INTERNALLY_SYNCHRONIZED_QUEUES_EXTENSION_NAME "VK_KHR_internally_synchronized_queues" +typedef struct VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 internallySynchronizedQueues; +} VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; + + + // VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_cooperative_matrix 1 #define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 @@ -14485,7 +14526,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( // VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. 
#define VK_NVX_image_view_handle 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 3 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 4 #define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" typedef struct VkImageViewHandleInfoNVX { VkStructureType sType; @@ -14505,6 +14546,7 @@ typedef struct VkImageViewAddressPropertiesNVX { typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); typedef uint64_t (VKAPI_PTR *PFN_vkGetImageViewHandle64NVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceCombinedImageSamplerIndexNVX)(VkDevice device, uint64_t imageViewIndex, uint64_t samplerIndex); #ifndef VK_NO_PROTOTYPES #ifndef VK_ONLY_EXPORTED_PROTOTYPES @@ -14525,6 +14567,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); #endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceCombinedImageSamplerIndexNVX( + VkDevice device, + uint64_t imageViewIndex, + uint64_t samplerIndex); +#endif #endif @@ -15494,6 +15543,380 @@ typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFil #define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" +// VK_EXT_descriptor_heap is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_descriptor_heap 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkTensorARM) +#define VK_EXT_DESCRIPTOR_HEAP_SPEC_VERSION 1 +#define VK_EXT_DESCRIPTOR_HEAP_EXTENSION_NAME "VK_EXT_descriptor_heap" + +typedef enum VkDescriptorMappingSourceEXT { + VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_CONSTANT_OFFSET_EXT = 0, + VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_PUSH_INDEX_EXT = 1, + VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_INDIRECT_INDEX_EXT = 2, + VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_INDIRECT_INDEX_ARRAY_EXT = 3, + VK_DESCRIPTOR_MAPPING_SOURCE_RESOURCE_HEAP_DATA_EXT = 4, + VK_DESCRIPTOR_MAPPING_SOURCE_PUSH_DATA_EXT = 5, + VK_DESCRIPTOR_MAPPING_SOURCE_PUSH_ADDRESS_EXT = 6, + VK_DESCRIPTOR_MAPPING_SOURCE_INDIRECT_ADDRESS_EXT = 7, + VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_SHADER_RECORD_INDEX_EXT = 8, + VK_DESCRIPTOR_MAPPING_SOURCE_SHADER_RECORD_DATA_EXT = 9, + VK_DESCRIPTOR_MAPPING_SOURCE_SHADER_RECORD_ADDRESS_EXT = 10, + VK_DESCRIPTOR_MAPPING_SOURCE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDescriptorMappingSourceEXT; +typedef VkFlags64 VkTensorViewCreateFlagsARM; + +// Flag bits for VkTensorViewCreateFlagBitsARM +typedef VkFlags64 VkTensorViewCreateFlagBitsARM; +static const VkTensorViewCreateFlagBitsARM VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM = 0x00000001ULL; + + +typedef enum VkSpirvResourceTypeFlagBitsEXT { + VK_SPIRV_RESOURCE_TYPE_ALL_EXT = 0x7FFFFFFF, + VK_SPIRV_RESOURCE_TYPE_SAMPLER_BIT_EXT = 0x00000001, + VK_SPIRV_RESOURCE_TYPE_SAMPLED_IMAGE_BIT_EXT = 0x00000002, + VK_SPIRV_RESOURCE_TYPE_READ_ONLY_IMAGE_BIT_EXT = 0x00000004, + VK_SPIRV_RESOURCE_TYPE_READ_WRITE_IMAGE_BIT_EXT = 0x00000008, + VK_SPIRV_RESOURCE_TYPE_COMBINED_SAMPLED_IMAGE_BIT_EXT = 0x00000010, + VK_SPIRV_RESOURCE_TYPE_UNIFORM_BUFFER_BIT_EXT = 0x00000020, + VK_SPIRV_RESOURCE_TYPE_READ_ONLY_STORAGE_BUFFER_BIT_EXT = 0x00000040, + VK_SPIRV_RESOURCE_TYPE_READ_WRITE_STORAGE_BUFFER_BIT_EXT = 0x00000080, + VK_SPIRV_RESOURCE_TYPE_ACCELERATION_STRUCTURE_BIT_EXT = 0x00000100, + 
VK_SPIRV_RESOURCE_TYPE_TENSOR_BIT_ARM = 0x00000200, + VK_SPIRV_RESOURCE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSpirvResourceTypeFlagBitsEXT; +typedef VkFlags VkSpirvResourceTypeFlagsEXT; +typedef struct VkHostAddressRangeEXT { + void* address; + size_t size; +} VkHostAddressRangeEXT; + +typedef struct VkHostAddressRangeConstEXT { + const void* address; + size_t size; +} VkHostAddressRangeConstEXT; + +typedef struct VkDeviceAddressRangeEXT { + VkDeviceAddress address; + VkDeviceSize size; +} VkDeviceAddressRangeEXT; + +typedef struct VkTexelBufferDescriptorInfoEXT { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkDeviceAddressRangeEXT addressRange; +} VkTexelBufferDescriptorInfoEXT; + +typedef struct VkImageDescriptorInfoEXT { + VkStructureType sType; + const void* pNext; + const VkImageViewCreateInfo* pView; + VkImageLayout layout; +} VkImageDescriptorInfoEXT; + +typedef struct VkTensorViewCreateInfoARM { + VkStructureType sType; + const void* pNext; + VkTensorViewCreateFlagsARM flags; + VkTensorARM tensor; + VkFormat format; +} VkTensorViewCreateInfoARM; + +typedef union VkResourceDescriptorDataEXT { + const VkImageDescriptorInfoEXT* pImage; + const VkTexelBufferDescriptorInfoEXT* pTexelBuffer; + const VkDeviceAddressRangeEXT* pAddressRange; + const VkTensorViewCreateInfoARM* pTensorARM; +} VkResourceDescriptorDataEXT; + +typedef struct VkResourceDescriptorInfoEXT { + VkStructureType sType; + const void* pNext; + VkDescriptorType type; + VkResourceDescriptorDataEXT data; +} VkResourceDescriptorInfoEXT; + +typedef struct VkBindHeapInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddressRangeEXT heapRange; + VkDeviceSize reservedRangeOffset; + VkDeviceSize reservedRangeSize; +} VkBindHeapInfoEXT; + +typedef struct VkPushDataInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t offset; + VkHostAddressRangeConstEXT data; +} VkPushDataInfoEXT; + +typedef struct VkDescriptorMappingSourceConstantOffsetEXT { + 
uint32_t heapOffset; + uint32_t heapArrayStride; + const VkSamplerCreateInfo* pEmbeddedSampler; + uint32_t samplerHeapOffset; + uint32_t samplerHeapArrayStride; +} VkDescriptorMappingSourceConstantOffsetEXT; + +typedef struct VkDescriptorMappingSourcePushIndexEXT { + uint32_t heapOffset; + uint32_t pushOffset; + uint32_t heapIndexStride; + uint32_t heapArrayStride; + const VkSamplerCreateInfo* pEmbeddedSampler; + VkBool32 useCombinedImageSamplerIndex; + uint32_t samplerHeapOffset; + uint32_t samplerPushOffset; + uint32_t samplerHeapIndexStride; + uint32_t samplerHeapArrayStride; +} VkDescriptorMappingSourcePushIndexEXT; + +typedef struct VkDescriptorMappingSourceIndirectIndexEXT { + uint32_t heapOffset; + uint32_t pushOffset; + uint32_t addressOffset; + uint32_t heapIndexStride; + uint32_t heapArrayStride; + const VkSamplerCreateInfo* pEmbeddedSampler; + VkBool32 useCombinedImageSamplerIndex; + uint32_t samplerHeapOffset; + uint32_t samplerPushOffset; + uint32_t samplerAddressOffset; + uint32_t samplerHeapIndexStride; + uint32_t samplerHeapArrayStride; +} VkDescriptorMappingSourceIndirectIndexEXT; + +typedef struct VkDescriptorMappingSourceHeapDataEXT { + uint32_t heapOffset; + uint32_t pushOffset; +} VkDescriptorMappingSourceHeapDataEXT; + +typedef struct VkDescriptorMappingSourceIndirectAddressEXT { + uint32_t pushOffset; + uint32_t addressOffset; +} VkDescriptorMappingSourceIndirectAddressEXT; + +typedef struct VkDescriptorMappingSourceShaderRecordIndexEXT { + uint32_t heapOffset; + uint32_t shaderRecordOffset; + uint32_t heapIndexStride; + uint32_t heapArrayStride; + const VkSamplerCreateInfo* pEmbeddedSampler; + VkBool32 useCombinedImageSamplerIndex; + uint32_t samplerHeapOffset; + uint32_t samplerShaderRecordOffset; + uint32_t samplerHeapIndexStride; + uint32_t samplerHeapArrayStride; +} VkDescriptorMappingSourceShaderRecordIndexEXT; + +typedef struct VkDescriptorMappingSourceIndirectIndexArrayEXT { + uint32_t heapOffset; + uint32_t pushOffset; + uint32_t 
addressOffset; + uint32_t heapIndexStride; + const VkSamplerCreateInfo* pEmbeddedSampler; + VkBool32 useCombinedImageSamplerIndex; + uint32_t samplerHeapOffset; + uint32_t samplerPushOffset; + uint32_t samplerAddressOffset; + uint32_t samplerHeapIndexStride; +} VkDescriptorMappingSourceIndirectIndexArrayEXT; + +typedef union VkDescriptorMappingSourceDataEXT { + VkDescriptorMappingSourceConstantOffsetEXT constantOffset; + VkDescriptorMappingSourcePushIndexEXT pushIndex; + VkDescriptorMappingSourceIndirectIndexEXT indirectIndex; + VkDescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray; + VkDescriptorMappingSourceHeapDataEXT heapData; + uint32_t pushDataOffset; + uint32_t pushAddressOffset; + VkDescriptorMappingSourceIndirectAddressEXT indirectAddress; + VkDescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex; + uint32_t shaderRecordDataOffset; + uint32_t shaderRecordAddressOffset; +} VkDescriptorMappingSourceDataEXT; + +typedef struct VkDescriptorSetAndBindingMappingEXT { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSet; + uint32_t firstBinding; + uint32_t bindingCount; + VkSpirvResourceTypeFlagsEXT resourceMask; + VkDescriptorMappingSourceEXT source; + VkDescriptorMappingSourceDataEXT sourceData; +} VkDescriptorSetAndBindingMappingEXT; + +typedef struct VkShaderDescriptorSetAndBindingMappingInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t mappingCount; + const VkDescriptorSetAndBindingMappingEXT* pMappings; +} VkShaderDescriptorSetAndBindingMappingInfoEXT; + +typedef struct VkOpaqueCaptureDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + const VkHostAddressRangeConstEXT* pData; +} VkOpaqueCaptureDataCreateInfoEXT; + +typedef struct VkPhysicalDeviceDescriptorHeapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 descriptorHeap; + VkBool32 descriptorHeapCaptureReplay; +} VkPhysicalDeviceDescriptorHeapFeaturesEXT; + +typedef struct VkPhysicalDeviceDescriptorHeapPropertiesEXT { + 
VkStructureType sType; + void* pNext; + VkDeviceSize samplerHeapAlignment; + VkDeviceSize resourceHeapAlignment; + VkDeviceSize maxSamplerHeapSize; + VkDeviceSize maxResourceHeapSize; + VkDeviceSize minSamplerHeapReservedRange; + VkDeviceSize minSamplerHeapReservedRangeWithEmbedded; + VkDeviceSize minResourceHeapReservedRange; + VkDeviceSize samplerDescriptorSize; + VkDeviceSize imageDescriptorSize; + VkDeviceSize bufferDescriptorSize; + VkDeviceSize samplerDescriptorAlignment; + VkDeviceSize imageDescriptorAlignment; + VkDeviceSize bufferDescriptorAlignment; + VkDeviceSize maxPushDataSize; + size_t imageCaptureReplayOpaqueDataSize; + uint32_t maxDescriptorHeapEmbeddedSamplers; + uint32_t samplerYcbcrConversionCount; + VkBool32 sparseDescriptorHeaps; + VkBool32 protectedDescriptorHeaps; +} VkPhysicalDeviceDescriptorHeapPropertiesEXT; + +typedef struct VkCommandBufferInheritanceDescriptorHeapInfoEXT { + VkStructureType sType; + const void* pNext; + const VkBindHeapInfoEXT* pSamplerHeapBindInfo; + const VkBindHeapInfoEXT* pResourceHeapBindInfo; +} VkCommandBufferInheritanceDescriptorHeapInfoEXT; + +typedef struct VkSamplerCustomBorderColorIndexCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; +} VkSamplerCustomBorderColorIndexCreateInfoEXT; + +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkIndirectCommandsLayoutPushDataTokenNV { + VkStructureType sType; + const void* pNext; + uint32_t pushDataOffset; + uint32_t pushDataSize; +} VkIndirectCommandsLayoutPushDataTokenNV; + +typedef struct VkSubsampledImageFormatPropertiesEXT { + VkStructureType sType; + const void* pNext; + uint32_t subsampledImageDescriptorCount; +} VkSubsampledImageFormatPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorHeapTensorPropertiesARM { + VkStructureType sType; + void* pNext; + 
VkDeviceSize tensorDescriptorSize; + VkDeviceSize tensorDescriptorAlignment; + size_t tensorCaptureReplayOpaqueDataSize; +} VkPhysicalDeviceDescriptorHeapTensorPropertiesARM; + +typedef VkResult (VKAPI_PTR *PFN_vkWriteSamplerDescriptorsEXT)(VkDevice device, uint32_t samplerCount, const VkSamplerCreateInfo* pSamplers, const VkHostAddressRangeEXT* pDescriptors); +typedef VkResult (VKAPI_PTR *PFN_vkWriteResourceDescriptorsEXT)(VkDevice device, uint32_t resourceCount, const VkResourceDescriptorInfoEXT* pResources, const VkHostAddressRangeEXT* pDescriptors); +typedef void (VKAPI_PTR *PFN_vkCmdBindSamplerHeapEXT)(VkCommandBuffer commandBuffer, const VkBindHeapInfoEXT* pBindInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindResourceHeapEXT)(VkCommandBuffer commandBuffer, const VkBindHeapInfoEXT* pBindInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDataEXT)(VkCommandBuffer commandBuffer, const VkPushDataInfoEXT* pPushDataInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDataEXT)(VkDevice device, uint32_t imageCount, const VkImage* pImages, VkHostAddressRangeEXT* pDatas); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetPhysicalDeviceDescriptorSizeEXT)(VkPhysicalDevice physicalDevice, VkDescriptorType descriptorType); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterCustomBorderColorEXT)(VkDevice device, const VkSamplerCustomBorderColorCreateInfoEXT* pBorderColor, VkBool32 requestIndex, uint32_t* pIndex); +typedef void (VKAPI_PTR *PFN_vkUnregisterCustomBorderColorEXT)(VkDevice device, uint32_t index); +typedef VkResult (VKAPI_PTR *PFN_vkGetTensorOpaqueCaptureDataARM)(VkDevice device, uint32_t tensorCount, const VkTensorARM* pTensors, VkHostAddressRangeEXT* pDatas); + +#ifndef VK_NO_PROTOTYPES +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkWriteSamplerDescriptorsEXT( + VkDevice device, + uint32_t samplerCount, + const VkSamplerCreateInfo* pSamplers, + const VkHostAddressRangeEXT* pDescriptors); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR 
VkResult VKAPI_CALL vkWriteResourceDescriptorsEXT( + VkDevice device, + uint32_t resourceCount, + const VkResourceDescriptorInfoEXT* pResources, + const VkHostAddressRangeEXT* pDescriptors); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindSamplerHeapEXT( + VkCommandBuffer commandBuffer, + const VkBindHeapInfoEXT* pBindInfo); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindResourceHeapEXT( + VkCommandBuffer commandBuffer, + const VkBindHeapInfoEXT* pBindInfo); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDataEXT( + VkCommandBuffer commandBuffer, + const VkPushDataInfoEXT* pPushDataInfo); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDataEXT( + VkDevice device, + uint32_t imageCount, + const VkImage* pImages, + VkHostAddressRangeEXT* pDatas); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetPhysicalDeviceDescriptorSizeEXT( + VkPhysicalDevice physicalDevice, + VkDescriptorType descriptorType); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterCustomBorderColorEXT( + VkDevice device, + const VkSamplerCustomBorderColorCreateInfoEXT* pBorderColor, + VkBool32 requestIndex, + uint32_t* pIndex); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkUnregisterCustomBorderColorEXT( + VkDevice device, + uint32_t index); +#endif + +#ifndef VK_ONLY_EXPORTED_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetTensorOpaqueCaptureDataARM( + VkDevice device, + uint32_t tensorCount, + const VkTensorARM* pTensors, + VkHostAddressRangeEXT* pDatas); +#endif +#endif + + // VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. 
#define VK_AMD_mixed_attachment_samples 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 @@ -16463,6 +16886,18 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { #define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" +// VK_QCOM_cooperative_matrix_conversion is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_cooperative_matrix_conversion 1 +#define VK_QCOM_COOPERATIVE_MATRIX_CONVERSION_SPEC_VERSION 1 +#define VK_QCOM_COOPERATIVE_MATRIX_CONVERSION_EXTENSION_NAME "VK_QCOM_cooperative_matrix_conversion" +typedef struct VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrixConversion; +} VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + + + // VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_global_priority 1 #define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 @@ -17227,7 +17662,7 @@ VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( // VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_fragment_density_map 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 3 #define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { VkStructureType sType; @@ -18030,6 +18465,7 @@ typedef enum VkIndirectCommandsTokenTypeNV { VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_NV = 1000135000, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, @@ -18405,13 +18841,6 @@ typedef VkPhysicalDeviceRobustness2PropertiesKHR VkPhysicalDeviceRobustness2Prop #define VK_EXT_custom_border_color 1 #define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 #define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" -typedef struct VkSamplerCustomBorderColorCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkClearColorValue customBorderColor; - VkFormat format; -} VkSamplerCustomBorderColorCreateInfoEXT; - typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { VkStructureType sType; void* pNext; @@ -21252,7 +21681,6 @@ typedef struct VkDirectDriverLoadingListLUNARG { // VK_ARM_tensors is a preprocessor guard. Do not pass it to API calls. 
#define VK_ARM_tensors 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkTensorARM) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkTensorViewARM) #define VK_ARM_TENSORS_SPEC_VERSION 1 #define VK_ARM_TENSORS_EXTENSION_NAME "VK_ARM_tensors" @@ -21268,14 +21696,9 @@ typedef VkFlags64 VkTensorCreateFlagsARM; typedef VkFlags64 VkTensorCreateFlagBitsARM; static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_MUTABLE_FORMAT_BIT_ARM = 0x00000001ULL; static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_PROTECTED_BIT_ARM = 0x00000002ULL; +static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_DESCRIPTOR_HEAP_CAPTURE_REPLAY_BIT_ARM = 0x00000008ULL; static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM = 0x00000004ULL; -typedef VkFlags64 VkTensorViewCreateFlagsARM; - -// Flag bits for VkTensorViewCreateFlagBitsARM -typedef VkFlags64 VkTensorViewCreateFlagBitsARM; -static const VkTensorViewCreateFlagBitsARM VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM = 0x00000001ULL; - typedef VkFlags64 VkTensorUsageFlagsARM; // Flag bits for VkTensorUsageFlagBitsARM @@ -21307,14 +21730,6 @@ typedef struct VkTensorCreateInfoARM { const uint32_t* pQueueFamilyIndices; } VkTensorCreateInfoARM; -typedef struct VkTensorViewCreateInfoARM { - VkStructureType sType; - const void* pNext; - VkTensorViewCreateFlagsARM flags; - VkTensorARM tensor; - VkFormat format; -} VkTensorViewCreateInfoARM; - typedef struct VkTensorMemoryRequirementsInfoARM { VkStructureType sType; const void* pNext; @@ -21894,6 +22309,7 @@ typedef enum VkDepthClampModeEXT { typedef enum VkShaderCreateFlagBitsEXT { VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, + VK_SHADER_CREATE_DESCRIPTOR_HEAP_BIT_EXT = 0x00000400, VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004, VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008, @@ -23560,6 +23976,8 @@ typedef enum VkIndirectCommandsTokenTypeEXT { 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT = 7, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT = 8, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT = 9, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_EXT = 1000135000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_SEQUENCE_INDEX_EXT = 1000135001, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT = 1000202002, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT = 1000202003, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT = 1000328000, @@ -23858,6 +24276,33 @@ typedef struct VkImageAlignmentControlCreateInfoMESA { +// VK_NV_push_constant_bank is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_push_constant_bank 1 +#define VK_NV_PUSH_CONSTANT_BANK_SPEC_VERSION 1 +#define VK_NV_PUSH_CONSTANT_BANK_EXTENSION_NAME "VK_NV_push_constant_bank" +typedef struct VkPushConstantBankInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t bank; +} VkPushConstantBankInfoNV; + +typedef struct VkPhysicalDevicePushConstantBankFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 pushConstantBank; +} VkPhysicalDevicePushConstantBankFeaturesNV; + +typedef struct VkPhysicalDevicePushConstantBankPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsPushConstantBanks; + uint32_t maxComputePushConstantBanks; + uint32_t maxGraphicsPushDataBanks; + uint32_t maxComputePushDataBanks; +} VkPhysicalDevicePushConstantBankPropertiesNV; + + + // VK_EXT_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_ray_tracing_invocation_reorder 1 #define VK_EXT_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 @@ -24285,6 +24730,18 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetComputeOccupancyPriorityNV( #endif +// VK_EXT_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_subgroup_partitioned 1 +#define VK_EXT_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_EXT_shader_subgroup_partitioned" +typedef struct VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupPartitioned; +} VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT; + + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 2660d9c..bbb7544 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -993,6 +993,21 @@ namespace VULKAN_HPP_NAMESPACE eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTexelBufferDescriptorInfoEXT = VK_STRUCTURE_TYPE_TEXEL_BUFFER_DESCRIPTOR_INFO_EXT, + eImageDescriptorInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DESCRIPTOR_INFO_EXT, + eResourceDescriptorInfoEXT = VK_STRUCTURE_TYPE_RESOURCE_DESCRIPTOR_INFO_EXT, + eBindHeapInfoEXT = VK_STRUCTURE_TYPE_BIND_HEAP_INFO_EXT, + ePushDataInfoEXT = VK_STRUCTURE_TYPE_PUSH_DATA_INFO_EXT, + eDescriptorSetAndBindingMappingEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_AND_BINDING_MAPPING_EXT, + eShaderDescriptorSetAndBindingMappingInfoEXT = VK_STRUCTURE_TYPE_SHADER_DESCRIPTOR_SET_AND_BINDING_MAPPING_INFO_EXT, + eOpaqueCaptureDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DATA_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorHeapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_PROPERTIES_EXT, + ePhysicalDeviceDescriptorHeapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_FEATURES_EXT, + 
eCommandBufferInheritanceDescriptorHeapInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_DESCRIPTOR_HEAP_INFO_EXT, + eSamplerCustomBorderColorIndexCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_INDEX_CREATE_INFO_EXT, + eIndirectCommandsLayoutPushDataTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_PUSH_DATA_TOKEN_NV, + eSubsampledImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_SUBSAMPLED_IMAGE_FORMAT_PROPERTIES_EXT, + ePhysicalDeviceDescriptorHeapTensorPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_TENSOR_PROPERTIES_ARM, eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, ePhysicalDeviceShaderBfloat16FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR, @@ -1041,39 +1056,40 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, - ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, - ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, - ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, - eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, - eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, - eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, - eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, - eGeometryAabbNV = 
VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, - eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, - eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, - eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, - ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, - eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, - eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, - ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, - ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, - ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, - eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, - eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, - eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, - ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, - ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, - ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, - ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, - eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, - eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, - 
eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, - eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, - eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, - eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, - eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, - ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = 
VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_CONVERSION_FEATURES_QCOM, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, + eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, + eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, + eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, + 
eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, #if defined( VK_USE_PLATFORM_GGP ) ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, #endif /*VK_USE_PLATFORM_GGP*/ @@ -1522,6 +1538,7 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT, ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, + ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INTERNALLY_SYNCHRONIZED_QUEUES_FEATURES_KHR, eLatencySleepModeInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV, eLatencySleepInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV, eSetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV, @@ -1689,6 +1706,9 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, ePhysicalDeviceShaderFmaFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FMA_FEATURES_KHR, + ePushConstantBankInfoNV = VK_STRUCTURE_TYPE_PUSH_CONSTANT_BANK_INFO_NV, + ePhysicalDevicePushConstantBankFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_FEATURES_NV, + ePhysicalDevicePushConstantBankPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_PROPERTIES_NV, ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_EXT, 
ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_EXT, ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, @@ -1760,7 +1780,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CACHE_INCREMENTAL_MODE_FEATURES_SEC, ePhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_UNIFORM_BUFFER_UNSIZED_ARRAY_FEATURES_EXT, eComputeOccupancyPriorityParametersNV = VK_STRUCTURE_TYPE_COMPUTE_OCCUPANCY_PRIORITY_PARAMETERS_NV, - ePhysicalDeviceComputeOccupancyPriorityFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_OCCUPANCY_PRIORITY_FEATURES_NV + ePhysicalDeviceComputeOccupancyPriorityFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_OCCUPANCY_PRIORITY_FEATURES_NV, + ePhysicalDeviceShaderSubgroupPartitionedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_PARTITIONED_FEATURES_EXT }; // wrapper class for enum VkObjectType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkObjectType.html @@ -2298,9 +2319,10 @@ namespace VULKAN_HPP_NAMESPACE eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eDescriptorHeapCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_HEAP_CAPTURE_REPLAY_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, - eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT, e2DViewCompatibleEXT = 
VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT, eVideoProfileIndependentKHR = VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR, @@ -2321,7 +2343,7 @@ namespace VULKAN_HPP_NAMESPACE ImageCreateFlagBits::eCubeCompatible | ImageCreateFlagBits::eAlias | ImageCreateFlagBits::eSplitInstanceBindRegions | ImageCreateFlagBits::e2DArrayCompatible | ImageCreateFlagBits::eBlockTexelViewCompatible | ImageCreateFlagBits::eExtendedUsage | ImageCreateFlagBits::eProtected | ImageCreateFlagBits::eDisjoint | ImageCreateFlagBits::eCornerSampledNV | - ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT | ImageCreateFlagBits::eSubsampledEXT | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + ImageCreateFlagBits::eDescriptorHeapCaptureReplayEXT | ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT | ImageCreateFlagBits::eSubsampledEXT | ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT | ImageCreateFlagBits::e2DViewCompatibleEXT | ImageCreateFlagBits::eVideoProfileIndependentKHR | ImageCreateFlagBits::eFragmentDensityMapOffsetEXT; }; @@ -2554,7 +2576,8 @@ namespace VULKAN_HPP_NAMESPACE // wrapper class for enum VkDeviceQueueCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateFlagBits.html enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags { - eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT, + eInternallySynchronizedKHR = VK_DEVICE_QUEUE_CREATE_INTERNALLY_SYNCHRONIZED_BIT_KHR }; // wrapper using for bitmask VkDeviceQueueCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateFlags.html @@ -2565,7 +2588,8 @@ namespace VULKAN_HPP_NAMESPACE { using WrappedType = VkDeviceQueueCreateFlagBits; static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = DeviceQueueCreateFlagBits::eProtected; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags 
allFlags = + DeviceQueueCreateFlagBits::eProtected | DeviceQueueCreateFlagBits::eInternallySynchronizedKHR; }; // wrapper class for enum VkPipelineStageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineStageFlagBits.html @@ -2861,6 +2885,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDescriptorHeapEXT = VK_BUFFER_USAGE_DESCRIPTOR_HEAP_BIT_EXT, eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, @@ -2892,9 +2917,9 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits::eExecutionGraphScratchAMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR | - BufferUsageFlagBits::eShaderBindingTableKHR | BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR | - BufferUsageFlagBits::eSamplerDescriptorBufferEXT | BufferUsageFlagBits::eResourceDescriptorBufferEXT | + | BufferUsageFlagBits::eDescriptorHeapEXT | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | + BufferUsageFlagBits::eAccelerationStructureStorageKHR | BufferUsageFlagBits::eShaderBindingTableKHR | BufferUsageFlagBits::eVideoEncodeDstKHR | + BufferUsageFlagBits::eVideoEncodeSrcKHR | BufferUsageFlagBits::eSamplerDescriptorBufferEXT | BufferUsageFlagBits::eResourceDescriptorBufferEXT | BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits::eMicromapStorageEXT | BufferUsageFlagBits::eTileMemoryQCOM; }; @@ -4603,6 +4628,7 @@ namespace 
VULKAN_HPP_NAMESPACE eRotateKHR = VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR, eRotateClustered = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, eRotateClusteredKHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR, + ePartitionedEXT = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_EXT, ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV }; @@ -4617,7 +4643,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = SubgroupFeatureFlagBits::eBasic | SubgroupFeatureFlagBits::eVote | SubgroupFeatureFlagBits::eArithmetic | SubgroupFeatureFlagBits::eBallot | SubgroupFeatureFlagBits::eShuffle | SubgroupFeatureFlagBits::eShuffleRelative | SubgroupFeatureFlagBits::eClustered | SubgroupFeatureFlagBits::eQuad | - SubgroupFeatureFlagBits::eRotate | SubgroupFeatureFlagBits::eRotateClustered | SubgroupFeatureFlagBits::ePartitionedNV; + SubgroupFeatureFlagBits::eRotate | SubgroupFeatureFlagBits::eRotateClustered | SubgroupFeatureFlagBits::ePartitionedEXT; }; // wrapper class for enum VkDescriptorUpdateTemplateType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateType.html @@ -5000,6 +5026,8 @@ namespace VULKAN_HPP_NAMESPACE eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT, eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eSamplerHeapReadEXT = VK_ACCESS_2_SAMPLER_HEAP_READ_BIT_EXT, + eResourceHeapReadEXT = VK_ACCESS_2_RESOURCE_HEAP_READ_BIT_EXT, eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, eShaderTileAttachmentReadQCOM = VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM, @@ -5051,15 +5079,16 @@ namespace VULKAN_HPP_NAMESPACE AccessFlagBits2::eDepthStencilAttachmentWrite | AccessFlagBits2::eTransferRead | AccessFlagBits2::eTransferWrite | AccessFlagBits2::eHostRead | AccessFlagBits2::eHostWrite | AccessFlagBits2::eMemoryRead | AccessFlagBits2::eMemoryWrite | 
AccessFlagBits2::eShaderSampledRead | AccessFlagBits2::eShaderStorageRead | AccessFlagBits2::eShaderStorageWrite | AccessFlagBits2::eVideoDecodeReadKHR | - AccessFlagBits2::eVideoDecodeWriteKHR | AccessFlagBits2::eVideoEncodeReadKHR | AccessFlagBits2::eVideoEncodeWriteKHR | - AccessFlagBits2::eShaderTileAttachmentReadQCOM | AccessFlagBits2::eShaderTileAttachmentWriteQCOM | AccessFlagBits2::eTransformFeedbackWriteEXT | - AccessFlagBits2::eTransformFeedbackCounterReadEXT | AccessFlagBits2::eTransformFeedbackCounterWriteEXT | AccessFlagBits2::eConditionalRenderingReadEXT | - AccessFlagBits2::eCommandPreprocessReadEXT | AccessFlagBits2::eCommandPreprocessWriteEXT | AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR | - AccessFlagBits2::eAccelerationStructureReadKHR | AccessFlagBits2::eAccelerationStructureWriteKHR | AccessFlagBits2::eFragmentDensityMapReadEXT | - AccessFlagBits2::eColorAttachmentReadNoncoherentEXT | AccessFlagBits2::eDescriptorBufferReadEXT | AccessFlagBits2::eInvocationMaskReadHUAWEI | - AccessFlagBits2::eShaderBindingTableReadKHR | AccessFlagBits2::eMicromapReadEXT | AccessFlagBits2::eMicromapWriteEXT | - AccessFlagBits2::eOpticalFlowReadNV | AccessFlagBits2::eOpticalFlowWriteNV | AccessFlagBits2::eDataGraphReadARM | AccessFlagBits2::eDataGraphWriteARM | - AccessFlagBits2::eMemoryDecompressionReadEXT | AccessFlagBits2::eMemoryDecompressionWriteEXT; + AccessFlagBits2::eVideoDecodeWriteKHR | AccessFlagBits2::eSamplerHeapReadEXT | AccessFlagBits2::eResourceHeapReadEXT | + AccessFlagBits2::eVideoEncodeReadKHR | AccessFlagBits2::eVideoEncodeWriteKHR | AccessFlagBits2::eShaderTileAttachmentReadQCOM | + AccessFlagBits2::eShaderTileAttachmentWriteQCOM | AccessFlagBits2::eTransformFeedbackWriteEXT | AccessFlagBits2::eTransformFeedbackCounterReadEXT | + AccessFlagBits2::eTransformFeedbackCounterWriteEXT | AccessFlagBits2::eConditionalRenderingReadEXT | AccessFlagBits2::eCommandPreprocessReadEXT | + AccessFlagBits2::eCommandPreprocessWriteEXT | 
AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits2::eAccelerationStructureReadKHR | + AccessFlagBits2::eAccelerationStructureWriteKHR | AccessFlagBits2::eFragmentDensityMapReadEXT | AccessFlagBits2::eColorAttachmentReadNoncoherentEXT | + AccessFlagBits2::eDescriptorBufferReadEXT | AccessFlagBits2::eInvocationMaskReadHUAWEI | AccessFlagBits2::eShaderBindingTableReadKHR | + AccessFlagBits2::eMicromapReadEXT | AccessFlagBits2::eMicromapWriteEXT | AccessFlagBits2::eOpticalFlowReadNV | AccessFlagBits2::eOpticalFlowWriteNV | + AccessFlagBits2::eDataGraphReadARM | AccessFlagBits2::eDataGraphWriteARM | AccessFlagBits2::eMemoryDecompressionReadEXT | + AccessFlagBits2::eMemoryDecompressionWriteEXT; }; // wrapper class for enum VkSubmitFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitFlagBits.html @@ -5292,6 +5321,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDescriptorHeapEXT = VK_BUFFER_USAGE_2_DESCRIPTOR_HEAP_BIT_EXT, eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT, eShaderBindingTableKHR = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV, @@ -5336,12 +5366,13 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits2::eExecutionGraphScratchAMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | BufferUsageFlagBits2::eConditionalRenderingEXT | BufferUsageFlagBits2::eShaderBindingTableKHR | BufferUsageFlagBits2::eTransformFeedbackBufferEXT | - BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits2::eVideoDecodeSrcKHR | BufferUsageFlagBits2::eVideoDecodeDstKHR | - BufferUsageFlagBits2::eVideoEncodeDstKHR | BufferUsageFlagBits2::eVideoEncodeSrcKHR | BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR | - 
BufferUsageFlagBits2::eAccelerationStructureStorageKHR | BufferUsageFlagBits2::eSamplerDescriptorBufferEXT | - BufferUsageFlagBits2::eResourceDescriptorBufferEXT | BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT | - BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2::eMicromapStorageEXT + | BufferUsageFlagBits2::eDescriptorHeapEXT | BufferUsageFlagBits2::eConditionalRenderingEXT | BufferUsageFlagBits2::eShaderBindingTableKHR | + BufferUsageFlagBits2::eTransformFeedbackBufferEXT | BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits2::eVideoDecodeSrcKHR | + BufferUsageFlagBits2::eVideoDecodeDstKHR | BufferUsageFlagBits2::eVideoEncodeDstKHR | BufferUsageFlagBits2::eVideoEncodeSrcKHR | + BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits2::eAccelerationStructureStorageKHR | + BufferUsageFlagBits2::eSamplerDescriptorBufferEXT | BufferUsageFlagBits2::eResourceDescriptorBufferEXT | + BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT | + BufferUsageFlagBits2::eMicromapStorageEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits2::eCompressedDataDgf1AMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -5386,6 +5417,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDescriptorHeapEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_HEAP_BIT_EXT, eRayTracingAllowSpheresAndLinearSweptSpheresNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV, eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, @@ -5436,9 +5468,10 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) | PipelineCreateFlagBits2::eExecutionGraphAMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | 
PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV | PipelineCreateFlagBits2::eEnableLegacyDitheringEXT | - PipelineCreateFlagBits2::eDeferCompileNV | PipelineCreateFlagBits2::eCaptureStatisticsKHR | PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR | - PipelineCreateFlagBits2::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2::eLibraryKHR | + | PipelineCreateFlagBits2::eDescriptorHeapEXT | PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV | + PipelineCreateFlagBits2::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2::eDeferCompileNV | PipelineCreateFlagBits2::eCaptureStatisticsKHR | + PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits2::eLinkTimeOptimizationEXT | + PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2::eLibraryKHR | PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR | PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR | @@ -6879,6 +6912,72 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags = {}; }; + //=== VK_EXT_descriptor_heap === + + // wrapper class for enum VkDescriptorMappingSourceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceEXT.html + enum class DescriptorMappingSourceEXT + { + eHeapWithConstantOffset = VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_CONSTANT_OFFSET_EXT, + eHeapWithPushIndex = VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_PUSH_INDEX_EXT, + eHeapWithIndirectIndex = VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_INDIRECT_INDEX_EXT, + eHeapWithIndirectIndexArray = 
VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_INDIRECT_INDEX_ARRAY_EXT, + eResourceHeapData = VK_DESCRIPTOR_MAPPING_SOURCE_RESOURCE_HEAP_DATA_EXT, + ePushData = VK_DESCRIPTOR_MAPPING_SOURCE_PUSH_DATA_EXT, + ePushAddress = VK_DESCRIPTOR_MAPPING_SOURCE_PUSH_ADDRESS_EXT, + eIndirectAddress = VK_DESCRIPTOR_MAPPING_SOURCE_INDIRECT_ADDRESS_EXT, + eHeapWithShaderRecordIndex = VK_DESCRIPTOR_MAPPING_SOURCE_HEAP_WITH_SHADER_RECORD_INDEX_EXT, + eShaderRecordData = VK_DESCRIPTOR_MAPPING_SOURCE_SHADER_RECORD_DATA_EXT, + eShaderRecordAddress = VK_DESCRIPTOR_MAPPING_SOURCE_SHADER_RECORD_ADDRESS_EXT + }; + + // wrapper class for enum VkSpirvResourceTypeFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpirvResourceTypeFlagBitsEXT.html + enum class SpirvResourceTypeFlagBitsEXT : VkSpirvResourceTypeFlagsEXT + { + eAll = VK_SPIRV_RESOURCE_TYPE_ALL_EXT, + eSampler = VK_SPIRV_RESOURCE_TYPE_SAMPLER_BIT_EXT, + eSampledImage = VK_SPIRV_RESOURCE_TYPE_SAMPLED_IMAGE_BIT_EXT, + eReadOnlyImage = VK_SPIRV_RESOURCE_TYPE_READ_ONLY_IMAGE_BIT_EXT, + eReadWriteImage = VK_SPIRV_RESOURCE_TYPE_READ_WRITE_IMAGE_BIT_EXT, + eCombinedSampledImage = VK_SPIRV_RESOURCE_TYPE_COMBINED_SAMPLED_IMAGE_BIT_EXT, + eUniformBuffer = VK_SPIRV_RESOURCE_TYPE_UNIFORM_BUFFER_BIT_EXT, + eReadOnlyStorageBuffer = VK_SPIRV_RESOURCE_TYPE_READ_ONLY_STORAGE_BUFFER_BIT_EXT, + eReadWriteStorageBuffer = VK_SPIRV_RESOURCE_TYPE_READ_WRITE_STORAGE_BUFFER_BIT_EXT, + eAccelerationStructure = VK_SPIRV_RESOURCE_TYPE_ACCELERATION_STRUCTURE_BIT_EXT, + eTensorARM = VK_SPIRV_RESOURCE_TYPE_TENSOR_BIT_ARM + }; + + // wrapper using for bitmask VkSpirvResourceTypeFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpirvResourceTypeFlagsEXT.html + using SpirvResourceTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSpirvResourceTypeFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SpirvResourceTypeFlagsEXT 
allFlags = + SpirvResourceTypeFlagBitsEXT::eAll | SpirvResourceTypeFlagBitsEXT::eSampler | SpirvResourceTypeFlagBitsEXT::eSampledImage | + SpirvResourceTypeFlagBitsEXT::eReadOnlyImage | SpirvResourceTypeFlagBitsEXT::eReadWriteImage | SpirvResourceTypeFlagBitsEXT::eCombinedSampledImage | + SpirvResourceTypeFlagBitsEXT::eUniformBuffer | SpirvResourceTypeFlagBitsEXT::eReadOnlyStorageBuffer | + SpirvResourceTypeFlagBitsEXT::eReadWriteStorageBuffer | SpirvResourceTypeFlagBitsEXT::eAccelerationStructure | SpirvResourceTypeFlagBitsEXT::eTensorARM; + }; + + // wrapper class for enum VkTensorViewCreateFlagBitsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagBitsARM.html + enum class TensorViewCreateFlagBitsARM : VkTensorViewCreateFlagsARM + { + eDescriptorBufferCaptureReplay = VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM + }; + + // wrapper using for bitmask VkTensorViewCreateFlagsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagsARM.html + using TensorViewCreateFlagsARM = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkTensorViewCreateFlagBitsARM; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR TensorViewCreateFlagsARM allFlags = TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay; + }; + //=== VK_EXT_blend_operation_advanced === // wrapper class for enum VkBlendOverlapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlendOverlapEXT.html @@ -7527,6 +7626,7 @@ namespace VULKAN_HPP_NAMESPACE eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, + ePushData = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_NV, eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV, ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, eDispatch = 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV @@ -8405,6 +8505,7 @@ namespace VULKAN_HPP_NAMESPACE { eMutableFormat = VK_TENSOR_CREATE_MUTABLE_FORMAT_BIT_ARM, eProtected = VK_TENSOR_CREATE_PROTECTED_BIT_ARM, + eDescriptorHeapCaptureReplay = VK_TENSOR_CREATE_DESCRIPTOR_HEAP_CAPTURE_REPLAY_BIT_ARM, eDescriptorBufferCaptureReplay = VK_TENSOR_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM }; @@ -8416,25 +8517,9 @@ namespace VULKAN_HPP_NAMESPACE { using WrappedType = VkTensorCreateFlagBitsARM; static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR TensorCreateFlagsARM allFlags = - TensorCreateFlagBitsARM::eMutableFormat | TensorCreateFlagBitsARM::eProtected | TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay; - }; - - // wrapper class for enum VkTensorViewCreateFlagBitsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagBitsARM.html - enum class TensorViewCreateFlagBitsARM : VkTensorViewCreateFlagsARM - { - eDescriptorBufferCaptureReplay = VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM - }; - - // wrapper using for bitmask VkTensorViewCreateFlagsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagsARM.html - using TensorViewCreateFlagsARM = Flags; - - template <> - struct FlagTraits - { - using WrappedType = VkTensorViewCreateFlagBitsARM; - static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR TensorViewCreateFlagsARM allFlags = TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay; + static VULKAN_HPP_CONST_OR_CONSTEXPR TensorCreateFlagsARM allFlags = TensorCreateFlagBitsARM::eMutableFormat | TensorCreateFlagBitsARM::eProtected | + TensorCreateFlagBitsARM::eDescriptorHeapCaptureReplay | + TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay; }; // wrapper class for enum VkTensorUsageFlagBitsARM, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorUsageFlagBitsARM.html @@ -8617,6 +8702,7 @@ namespace VULKAN_HPP_NAMESPACE enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT { eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT, + eDescriptorHeap = VK_SHADER_CREATE_DESCRIPTOR_HEAP_BIT_EXT, eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT, eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT, @@ -8636,9 +8722,10 @@ namespace VULKAN_HPP_NAMESPACE using WrappedType = VkShaderCreateFlagBitsEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags = - ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | - ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | - ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable | ShaderCreateFlagBitsEXT::e64BitIndexing; + ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eDescriptorHeap | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | + ShaderCreateFlagBitsEXT::eRequireFullSubgroups | ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | + ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | + ShaderCreateFlagBitsEXT::eIndirectBindable | ShaderCreateFlagBitsEXT::e64BitIndexing; }; // wrapper class for enum VkShaderCodeTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCodeTypeEXT.html @@ -9361,21 +9448,23 @@ namespace VULKAN_HPP_NAMESPACE // wrapper class for enum VkIndirectCommandsTokenTypeEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsTokenTypeEXT.html enum class IndirectCommandsTokenTypeEXT { - eExecutionSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT, - ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT, - eSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT, - eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT, - eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT, - eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT, - eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT, - eDrawIndexedCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT, - eDrawCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT, - eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT, - eDrawMeshTasksNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT, - eDrawMeshTasksCountNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT, - eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT, - eDrawMeshTasksCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT, - eTraceRays2 = VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT + eExecutionSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT, + eSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT, + eDrawIndexedCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT, + eDrawCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT, + ePushData = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_EXT, + ePushDataSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_DATA_SEQUENCE_INDEX_EXT, 
+ eDrawMeshTasksNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT, + eDrawMeshTasksCountNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT, + eDrawMeshTasksCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT, + eTraceRays2 = VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT }; // wrapper class for enum VkIndirectExecutionSetInfoTypeEXT, see @@ -9640,6 +9729,10 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::eDebugUtilsMessengerEXT: return DebugReportObjectTypeEXT::eUnknown; + //=== VK_EXT_descriptor_heap === + case ObjectType::eTensorARM: + return DebugReportObjectTypeEXT::eUnknown; + //=== VK_KHR_acceleration_structure === case ObjectType::eAccelerationStructureKHR: return DebugReportObjectTypeEXT::eAccelerationStructureKHR; @@ -9679,7 +9772,6 @@ namespace VULKAN_HPP_NAMESPACE return DebugReportObjectTypeEXT::eUnknown; //=== VK_ARM_tensors === - case ObjectType::eTensorARM: return DebugReportObjectTypeEXT::eUnknown; case ObjectType::eTensorViewARM: return DebugReportObjectTypeEXT::eUnknown; diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index a8f30b4..785dc5f 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -67,6 +67,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" }, { "VK_EXT_validation_features", "VK_EXT_layer_settings" }, + { "VK_EXT_descriptor_buffer", "VK_EXT_descriptor_heap" }, #if defined( VK_ENABLE_BETA_EXTENSIONS ) { "VK_NV_displacement_micromap", "VK_NV_cluster_acceleration_structure" } #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -175,6 +176,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) "VK_AMDX_shader_enqueue", #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + 
"VK_EXT_descriptor_heap", "VK_AMD_mixed_attachment_samples", "VK_AMD_shader_fragment_mask", "VK_EXT_inline_uniform_block", @@ -209,6 +211,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_draw_indirect_count", "VK_EXT_filter_cubic", "VK_QCOM_render_pass_shader_resolve", + "VK_QCOM_cooperative_matrix_conversion", "VK_EXT_global_priority", "VK_KHR_shader_subgroup_extended_types", "VK_KHR_8bit_storage", @@ -436,6 +439,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_ARM_shader_core_builtins", "VK_EXT_pipeline_library_group_handles", "VK_EXT_dynamic_rendering_unused_attachments", + "VK_KHR_internally_synchronized_queues", "VK_NV_low_latency2", "VK_KHR_cooperative_matrix", "VK_ARM_data_graph", @@ -485,6 +489,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_maintenance8", "VK_MESA_image_alignment_control", "VK_KHR_shader_fma", + "VK_NV_push_constant_bank", "VK_EXT_ray_tracing_invocation_reorder", "VK_EXT_depth_clamp_control", "VK_KHR_maintenance9", @@ -514,7 +519,8 @@ namespace VULKAN_HPP_NAMESPACE "VK_EXT_shader_long_vector", "VK_SEC_pipeline_cache_incremental_mode", "VK_EXT_shader_uniform_buffer_unsized_array", - "VK_NV_compute_occupancy_priority" }; + "VK_NV_compute_occupancy_priority", + "VK_EXT_shader_subgroup_partitioned" }; return deviceExtensions; } @@ -1105,6 +1111,16 @@ namespace VULKAN_HPP_NAMESPACE } } } } } #endif /*VK_ENABLE_BETA_EXTENSIONS*/ , + { "VK_EXT_descriptor_heap", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_maintenance5", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_maintenance5", + } } } } }, { "VK_EXT_inline_uniform_block", { { "VK_VERSION_1_0", { { @@ -1226,6 +1242,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_cooperative_matrix_conversion", + { { "VK_VERSION_1_0", + { { + "VK_KHR_cooperative_matrix", + } } } } }, { "VK_KHR_shader_subgroup_extended_types", { { "VK_VERSION_1_1", { {} } } } }, { "VK_KHR_8bit_storage", { { "VK_VERSION_1_0", @@ 
-1817,11 +1838,8 @@ namespace VULKAN_HPP_NAMESPACE { "VK_QCOM_tile_shading", { { "VK_VERSION_1_0", { { - "VK_QCOM_tile_properties", - }, - { - "VK_KHR_get_physical_device_properties2", - } } } } }, + "VK_QCOM_tile_properties", + } } } } }, { "VK_KHR_synchronization2", { { "VK_VERSION_1_0", { { @@ -2498,22 +2516,21 @@ namespace VULKAN_HPP_NAMESPACE { "VK_KHR_surface_maintenance1", { { "VK_VERSION_1_0", { { - "VK_KHR_surface", - }, - { - "VK_KHR_get_surface_capabilities2", - } } } } }, + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + } } } } }, { "VK_KHR_swapchain_maintenance1", { { "VK_VERSION_1_0", { { - "VK_KHR_swapchain", - }, - { - "VK_KHR_surface_maintenance1", - }, - { - "VK_KHR_get_physical_device_properties2", - } } } } }, + "VK_KHR_get_physical_device_properties2", + "VK_KHR_surface_maintenance1", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_surface_maintenance1", + "VK_KHR_swapchain", + } } } } }, { "VK_QCOM_multiview_per_view_viewports", { { "VK_VERSION_1_0", { { @@ -2571,6 +2588,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_internally_synchronized_queues", { { "VK_VERSION_1_1", { {} } } } }, { "VK_NV_low_latency2", { { "VK_VERSION_1_0", { { @@ -3041,6 +3059,12 @@ namespace VULKAN_HPP_NAMESPACE } } }, { "VK_VERSION_1_1", { {} } } } }, { "VK_NV_compute_occupancy_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_subgroup_partitioned", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", @@ -3141,6 +3165,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3" }, { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, + { "VK_NV_shader_subgroup_partitioned", "VK_EXT_shader_subgroup_partitioned" }, { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, { 
"VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, @@ -3281,6 +3306,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_EXT_layer_settings"; } + if ( extension == "VK_EXT_descriptor_buffer" ) + { + return "VK_EXT_descriptor_heap"; + } #if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( extension == "VK_NV_displacement_micromap" ) { @@ -3508,6 +3537,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_2"; } + if ( extension == "VK_NV_shader_subgroup_partitioned" ) + { + return "VK_EXT_shader_subgroup_partitioned"; + } if ( extension == "VK_KHR_depth_stencil_resolve" ) { return "VK_VERSION_1_2"; @@ -3767,7 +3800,7 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_MVK_macos_surface" ) #endif /*VK_USE_PLATFORM_MACOS_MVK*/ || ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || - ( extension == "VK_EXT_validation_features" ) + ( extension == "VK_EXT_validation_features" ) || ( extension == "VK_EXT_descriptor_buffer" ) #if defined( VK_ENABLE_BETA_EXTENSIONS ) || ( extension == "VK_NV_displacement_micromap" ) #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -3830,7 +3863,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) || ( extension == "VK_AMDX_shader_enqueue" ) #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || + || ( extension == "VK_EXT_descriptor_heap" ) || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_KHR_shader_bfloat16" ) || ( extension == "VK_EXT_sample_locations" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || ( extension == 
"VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) || @@ -3845,8 +3878,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_ENABLE_BETA_EXTENSIONS*/ || ( extension == "VK_NV_shading_rate_image" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_NV_representative_fragment_test" ) || ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_filter_cubic" ) || - ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_EXT_global_priority" ) || - ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_QCOM_cooperative_matrix_conversion" ) || + ( extension == "VK_EXT_global_priority" ) || ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || ( extension == "VK_EXT_external_memory_host" ) || ( extension == "VK_AMD_buffer_marker" ) || ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_KHR_shader_clock" ) || ( extension == "VK_AMD_pipeline_compiler_control" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_AMD_shader_core_properties" ) || ( extension == "VK_KHR_video_decode_h265" ) || ( extension == "VK_KHR_global_priority" ) || @@ -3962,14 +3995,14 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_NV_cooperative_vector" ) || ( extension == "VK_NV_extended_sparse_address_space" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) || ( extension == "VK_EXT_legacy_vertex_attributes" ) || ( extension == "VK_ARM_shader_core_builtins" ) || ( extension == "VK_EXT_pipeline_library_group_handles" ) || - ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || ( extension == "VK_NV_low_latency2" ) || - ( extension == "VK_KHR_cooperative_matrix" ) || ( extension == "VK_ARM_data_graph" ) || ( extension == "VK_QCOM_multiview_per_view_render_areas" 
) || - ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || - ( extension == "VK_KHR_video_decode_vp9" ) || ( extension == "VK_KHR_video_maintenance1" ) || ( extension == "VK_NV_per_stage_descriptor_set" ) || - ( extension == "VK_QCOM_image_processing2" ) || ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || - ( extension == "VK_QCOM_filter_cubic_clamp" ) || ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || - ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || - ( extension == "VK_KHR_unified_image_layouts" ) || ( extension == "VK_KHR_shader_float_controls2" ) + ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || ( extension == "VK_KHR_internally_synchronized_queues" ) || + ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || ( extension == "VK_ARM_data_graph" ) || + ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_KHR_compute_shader_derivatives" ) || + ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || ( extension == "VK_KHR_video_decode_vp9" ) || + ( extension == "VK_KHR_video_maintenance1" ) || ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || + ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || + ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_unified_image_layouts" ) || ( extension == "VK_KHR_shader_float_controls2" ) #if defined( VK_USE_PLATFORM_SCREEN_QNX ) || ( extension == "VK_QNX_external_memory_screen_buffer" ) #endif 
/*VK_USE_PLATFORM_SCREEN_QNX*/ @@ -3985,9 +4018,9 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_NV_cluster_acceleration_structure" ) || ( extension == "VK_NV_partitioned_acceleration_structure" ) || ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_KHR_maintenance8" ) || ( extension == "VK_MESA_image_alignment_control" ) || ( extension == "VK_KHR_shader_fma" ) || - ( extension == "VK_EXT_ray_tracing_invocation_reorder" ) || ( extension == "VK_EXT_depth_clamp_control" ) || - ( extension == "VK_KHR_maintenance9" ) || ( extension == "VK_KHR_video_maintenance2" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || - ( extension == "VK_NV_cooperative_matrix2" ) || ( extension == "VK_ARM_pipeline_opacity_micromap" ) + ( extension == "VK_NV_push_constant_bank" ) || ( extension == "VK_EXT_ray_tracing_invocation_reorder" ) || + ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_KHR_maintenance9" ) || ( extension == "VK_KHR_video_maintenance2" ) || + ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ) || ( extension == "VK_ARM_pipeline_opacity_micromap" ) #if defined( VK_USE_PLATFORM_METAL_EXT ) || ( extension == "VK_EXT_external_memory_metal" ) #endif /*VK_USE_PLATFORM_METAL_EXT*/ @@ -4001,7 +4034,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_present_mode_fifo_latest_ready" ) || ( extension == "VK_EXT_shader_64bit_indexing" ) || ( extension == "VK_EXT_custom_resolve" ) || ( extension == "VK_QCOM_data_graph_model" ) || ( extension == "VK_KHR_maintenance10" ) || ( extension == "VK_EXT_shader_long_vector" ) || ( extension == "VK_SEC_pipeline_cache_incremental_mode" ) || - ( extension == "VK_EXT_shader_uniform_buffer_unsized_array" ) || ( extension == "VK_NV_compute_occupancy_priority" ); + ( extension == "VK_EXT_shader_uniform_buffer_unsized_array" ) || ( extension == "VK_NV_compute_occupancy_priority" ) || + ( extension == 
"VK_EXT_shader_subgroup_partitioned" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) @@ -4099,7 +4133,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_KHR_global_priority" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || - ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || + ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || + ( extension == "VK_NV_shader_subgroup_partitioned" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index 69df08e..41f3a2a 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -12503,6 +12503,17 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + // wrapper function for command vkGetDeviceCombinedImageSamplerIndexNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceCombinedImageSamplerIndexNVX.html + template ::type> + VULKAN_HPP_INLINE uint64_t Device::getCombinedImageSamplerIndexNVX( uint64_t imageViewIndex, + uint64_t samplerIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return 
d.vkGetDeviceCombinedImageSamplerIndexNVX( static_cast( m_device ), imageViewIndex, samplerIndex ); + } + //=== VK_AMD_draw_indirect_count === // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html @@ -17011,6 +17022,379 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkWriteSamplerDescriptorsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeSamplerDescriptorsEXT( uint32_t samplerCount, + const SamplerCreateInfo * pSamplers, + const HostAddressRangeEXT * pDescriptors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteSamplerDescriptorsEXT( static_cast( m_device ), + samplerCount, + reinterpret_cast( pSamplers ), + reinterpret_cast( pDescriptors ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteSamplerDescriptorsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + template ::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::writeSamplerDescriptorsEXT( + ArrayProxy const & samplers, ArrayProxy const & descriptors, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteSamplerDescriptorsEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( samplers.size() == descriptors.size() ); +# else + if ( samplers.size() != descriptors.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::writeSamplerDescriptorsEXT: samplers.size() 
!= descriptors.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast( d.vkWriteSamplerDescriptorsEXT( m_device, + samplers.size(), + reinterpret_cast( samplers.data() ), + reinterpret_cast( descriptors.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeSamplerDescriptorsEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeResourceDescriptorsEXT( uint32_t resourceCount, + const ResourceDescriptorInfoEXT * pResources, + const HostAddressRangeEXT * pDescriptors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteResourceDescriptorsEXT( static_cast( m_device ), + resourceCount, + reinterpret_cast( pResources ), + reinterpret_cast( pDescriptors ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + template ::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::writeResourceDescriptorsEXT( + ArrayProxy const & resources, ArrayProxy const & descriptors, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteResourceDescriptorsEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resources.size() == descriptors.size() ); +# else + if ( resources.size() != descriptors.size() ) + { + throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING "::Device::writeResourceDescriptorsEXT: resources.size() != descriptors.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast( d.vkWriteResourceDescriptorsEXT( m_device, + resources.size(), + reinterpret_cast( resources.data() ), + reinterpret_cast( descriptors.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeResourceDescriptorsEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::bindSamplerHeapEXT( const BindHeapInfoEXT * pBindInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindSamplerHeapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pBindInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::bindSamplerHeapEXT( const BindHeapInfoEXT & bindInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindSamplerHeapEXT && "Function requires " ); +# endif + + d.vkCmdBindSamplerHeapEXT( m_commandBuffer, reinterpret_cast( &bindInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::bindResourceHeapEXT( const BindHeapInfoEXT * pBindInfo, Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindResourceHeapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pBindInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::bindResourceHeapEXT( const BindHeapInfoEXT & bindInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindResourceHeapEXT && "Function requires " ); +# endif + + d.vkCmdBindResourceHeapEXT( m_commandBuffer, reinterpret_cast( &bindInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDataEXT( const PushDataInfoEXT * pPushDataInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDataEXT( static_cast( m_commandBuffer ), reinterpret_cast( pPushDataInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + template ::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDataEXT( const PushDataInfoEXT & pushDataInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDataEXT && "Function requires " ); +# endif + + d.vkCmdPushDataEXT( m_commandBuffer, reinterpret_cast( &pushDataInfo ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDataEXT( uint32_t imageCount, + const Image * pImages, + HostAddressRangeEXT * pDatas, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageOpaqueCaptureDataEXT( + static_cast( m_device ), imageCount, reinterpret_cast( pImages ), reinterpret_cast( pDatas ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::value, int>::type, + typename std::enable_if::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getImageOpaqueCaptureDataEXT( ArrayProxy const & images, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDataEXT && "Function requires " ); +# endif + + std::vector datas( images.size() ); + Result result = static_cast( d.vkGetImageOpaqueCaptureDataEXT( + m_device, images.size(), reinterpret_cast( images.data() ), reinterpret_cast( datas.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDataEXT" ); + + return detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::value, int>::type, + typename std::enable_if::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType>::type + Device::getImageOpaqueCaptureDataEXT( ArrayProxy const & images, + HostAddressRangeEXTAllocator const & hostAddressRangeEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDataEXT && "Function requires " ); +# endif + + std::vector datas( images.size(), {}, hostAddressRangeEXTAllocator ); + Result result = static_cast( d.vkGetImageOpaqueCaptureDataEXT( + m_device, images.size(), reinterpret_cast( images.data() ), reinterpret_cast( datas.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDataEXT" ); + + return detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getImageOpaqueCaptureDataEXT( const Image & image, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDataEXT && "Function requires " ); +# endif + + HostAddressRangeEXT data; + Result result = static_cast( + d.vkGetImageOpaqueCaptureDataEXT( m_device, 1, reinterpret_cast( &image ), reinterpret_cast( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDataEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDescriptorSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDescriptorSizeEXT.html + template ::type> + VULKAN_HPP_INLINE 
DeviceSize PhysicalDevice::getDescriptorSizeEXT( DescriptorType descriptorType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDescriptorSizeEXT( static_cast( m_physicalDevice ), static_cast( descriptorType ) ) ); + } + + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerCustomBorderColorEXT( const SamplerCustomBorderColorCreateInfoEXT * pBorderColor, + Bool32 requestIndex, + uint32_t * pIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkRegisterCustomBorderColorEXT( static_cast( m_device ), + reinterpret_cast( pBorderColor ), + static_cast( requestIndex ), + pIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerCustomBorderColorEXT( const SamplerCustomBorderColorCreateInfoEXT & borderColor, Bool32 requestIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterCustomBorderColorEXT && "Function requires " ); +# endif + + uint32_t index; + Result result = static_cast( d.vkRegisterCustomBorderColorEXT( + m_device, reinterpret_cast( &borderColor ), static_cast( requestIndex ), &index ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerCustomBorderColorEXT" ); + + return detail::createResultValueType( result, 
std::move( index ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnregisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnregisterCustomBorderColorEXT.html + template ::type> + VULKAN_HPP_INLINE void Device::unregisterCustomBorderColorEXT( uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUnregisterCustomBorderColorEXT( static_cast( m_device ), index ); + } + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getTensorOpaqueCaptureDataARM( uint32_t tensorCount, + const TensorARM * pTensors, + HostAddressRangeEXT * pDatas, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetTensorOpaqueCaptureDataARM( static_cast( m_device ), + tensorCount, + reinterpret_cast( pTensors ), + reinterpret_cast( pDatas ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::value, int>::type, + typename std::enable_if::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorOpaqueCaptureDataARM && "Function requires " ); +# endif + + std::vector datas( tensors.size() ); + Result result = static_cast( d.vkGetTensorOpaqueCaptureDataARM( + m_device, tensors.size(), 
reinterpret_cast( tensors.data() ), reinterpret_cast( datas.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDataARM" ); + + return detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::value, int>::type, + typename std::enable_if::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors, + HostAddressRangeEXTAllocator const & hostAddressRangeEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorOpaqueCaptureDataARM && "Function requires " ); +# endif + + std::vector datas( tensors.size(), {}, hostAddressRangeEXTAllocator ); + Result result = static_cast( d.vkGetTensorOpaqueCaptureDataARM( + m_device, tensors.size(), reinterpret_cast( tensors.data() ), reinterpret_cast( datas.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDataARM" ); + + return detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getTensorOpaqueCaptureDataARM( const TensorARM & tensor, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorOpaqueCaptureDataARM && "Function requires " ); +# endif + + HostAddressRangeEXT data; + Result result = static_cast( 
+ d.vkGetTensorOpaqueCaptureDataARM( m_device, 1, reinterpret_cast( &tensor ), reinterpret_cast( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDataARM" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_sample_locations === // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index dbf3582..7d1e6bb 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -945,6 +945,36 @@ namespace VULKAN_HPP_NAMESPACE union DeviceOrHostAddressConstAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + struct HostAddressRangeEXT; + struct HostAddressRangeConstEXT; + struct DeviceAddressRangeEXT; + struct TexelBufferDescriptorInfoEXT; + struct ImageDescriptorInfoEXT; + struct ResourceDescriptorInfoEXT; + union ResourceDescriptorDataEXT; + struct BindHeapInfoEXT; + struct PushDataInfoEXT; + struct DescriptorMappingSourceConstantOffsetEXT; + struct DescriptorMappingSourcePushIndexEXT; + struct DescriptorMappingSourceIndirectIndexEXT; + struct DescriptorMappingSourceHeapDataEXT; + struct DescriptorMappingSourceIndirectAddressEXT; + struct DescriptorMappingSourceShaderRecordIndexEXT; + union DescriptorMappingSourceDataEXT; + struct DescriptorMappingSourceIndirectIndexArrayEXT; + struct DescriptorSetAndBindingMappingEXT; + struct ShaderDescriptorSetAndBindingMappingInfoEXT; + struct OpaqueCaptureDataCreateInfoEXT; + struct PhysicalDeviceDescriptorHeapFeaturesEXT; + struct PhysicalDeviceDescriptorHeapPropertiesEXT; + struct CommandBufferInheritanceDescriptorHeapInfoEXT; + struct SamplerCustomBorderColorIndexCreateInfoEXT; + struct IndirectCommandsLayoutPushDataTokenNV; + struct 
SubsampledImageFormatPropertiesEXT; + struct TensorViewCreateInfoARM; + struct PhysicalDeviceDescriptorHeapTensorPropertiesARM; + //=== VK_AMD_mixed_attachment_samples === struct AttachmentSampleCountInfoAMD; using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; @@ -1067,6 +1097,9 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceImageViewImageFormatInfoEXT; struct FilterCubicImageViewImageFormatPropertiesEXT; + //=== VK_QCOM_cooperative_matrix_conversion === + struct PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + //=== VK_EXT_external_memory_host === struct ImportMemoryHostPointerInfoEXT; struct MemoryHostPointerPropertiesEXT; @@ -1713,7 +1746,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_ARM_tensors === struct TensorDescriptionARM; struct TensorCreateInfoARM; - struct TensorViewCreateInfoARM; struct TensorMemoryRequirementsInfoARM; struct BindTensorMemoryInfoARM; struct WriteDescriptorSetTensorARM; @@ -1881,6 +1913,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_dynamic_rendering_unused_attachments === struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + //=== VK_KHR_internally_synchronized_queues === + struct PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; + //=== VK_NV_low_latency2 === struct LatencySleepModeInfoNV; struct LatencySleepInfoNV; @@ -2164,6 +2199,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_shader_fma === struct PhysicalDeviceShaderFmaFeaturesKHR; + //=== VK_NV_push_constant_bank === + struct PushConstantBankInfoNV; + struct PhysicalDevicePushConstantBankFeaturesNV; + struct PhysicalDevicePushConstantBankPropertiesNV; + //=== VK_EXT_ray_tracing_invocation_reorder === struct PhysicalDeviceRayTracingInvocationReorderPropertiesEXT; struct PhysicalDeviceRayTracingInvocationReorderFeaturesEXT; @@ -2292,6 +2332,9 @@ namespace VULKAN_HPP_NAMESPACE struct ComputeOccupancyPriorityParametersNV; struct PhysicalDeviceComputeOccupancyPriorityFeaturesNV; + //=== VK_EXT_shader_subgroup_partitioned === + 
struct PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -2354,6 +2397,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === class DebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + class TensorARM; + //=== VK_KHR_acceleration_structure === class AccelerationStructureKHR; @@ -2744,6 +2790,16 @@ namespace VULKAN_HPP_NAMESPACE using UniqueDebugUtilsMessengerEXT = UniqueHandle; + //=== VK_EXT_descriptor_heap === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueTensorARM = UniqueHandle; + //=== VK_KHR_acceleration_structure === template class UniqueHandleTraits @@ -2848,15 +2904,6 @@ namespace VULKAN_HPP_NAMESPACE using UniqueMicromapEXT = UniqueHandle; //=== VK_ARM_tensors === - template - class UniqueHandleTraits - { - public: - using deleter = detail::ObjectDestroy; - }; - - using UniqueTensorARM = UniqueHandle; - template class UniqueHandleTraits { @@ -6525,6 +6572,35 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + template ::type = true> + void bindSamplerHeapEXT( const BindHeapInfoEXT * pBindInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + template ::type = true> + void bindSamplerHeapEXT( const BindHeapInfoEXT & bindInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + template ::type = true> + void bindResourceHeapEXT( const BindHeapInfoEXT * pBindInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + template ::type = true> + void bindResourceHeapEXT( const BindHeapInfoEXT & bindInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + template ::type = true> + void pushDataEXT( const PushDataInfoEXT * pPushDataInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + template ::type = true> + void pushDataEXT( const PushDataInfoEXT & pushDataInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_sample_locations === // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html @@ -14186,6 +14262,14 @@ namespace VULKAN_HPP_NAMESPACE getImageViewAddressNVX( ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + // wrapper function for command vkGetDeviceCombinedImageSamplerIndexNVX, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceCombinedImageSamplerIndexNVX.html + template ::type = true> + uint64_t getCombinedImageSamplerIndexNVX( uint64_t imageViewIndex, + uint64_t samplerIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_AMD_shader_info === // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html @@ -14922,6 +15006,137 @@ namespace VULKAN_HPP_NAMESPACE # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkWriteSamplerDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD Result writeSamplerDescriptorsEXT( uint32_t samplerCount, + const SamplerCreateInfo * pSamplers, + const HostAddressRangeEXT * pDescriptors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteSamplerDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + writeSamplerDescriptorsEXT( ArrayProxy const & samplers, + ArrayProxy const & descriptors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD Result writeResourceDescriptorsEXT( uint32_t resourceCount, + const ResourceDescriptorInfoEXT * pResources, + const HostAddressRangeEXT * pDescriptors, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + writeResourceDescriptorsEXT( ArrayProxy const & resources, + ArrayProxy const & descriptors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDataEXT( uint32_t imageCount, + const Image * pImages, + HostAddressRangeEXT * pDatas, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0, + typename std::enable_if::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageOpaqueCaptureDataEXT( ArrayProxy const & images, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0, + typename std::enable_if::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + 
getImageOpaqueCaptureDataEXT( ArrayProxy const & images, + HostAddressRangeEXTAllocator const & hostAddressRangeEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageOpaqueCaptureDataEXT( const Image & image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD Result registerCustomBorderColorEXT( const SamplerCustomBorderColorCreateInfoEXT * pBorderColor, + Bool32 requestIndex, + uint32_t * pIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + template ::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType::type registerCustomBorderColorEXT( + const SamplerCustomBorderColorCreateInfoEXT & borderColor, Bool32 requestIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnregisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnregisterCustomBorderColorEXT.html + template ::type = true> + void unregisterCustomBorderColorEXT( uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, 
see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::type = true> + VULKAN_HPP_NODISCARD Result getTensorOpaqueCaptureDataARM( uint32_t tensorCount, + const TensorARM * pTensors, + HostAddressRangeEXT * pDatas, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0, + typename std::enable_if::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0, + typename std::enable_if::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors, + HostAddressRangeEXTAllocator const & hostAddressRangeEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + template ::type = true> + VULKAN_HPP_NODISCARD typename ResultValueType::type + getTensorOpaqueCaptureDataARM( const TensorARM & tensor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_get_memory_requirements2 === // wrapper 
function for command vkGetImageMemoryRequirements2KHR, see @@ -20866,6 +21081,14 @@ namespace VULKAN_HPP_NAMESPACE getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkGetPhysicalDeviceDescriptorSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDescriptorSizeEXT.html + template ::type = true> + DeviceSize getDescriptorSizeEXT( DescriptorType descriptorType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_sample_locations === // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see diff --git a/third_party/vulkan/vulkan_hash.hpp b/third_party/vulkan/vulkan_hash.hpp index 02df84b..ff7e778 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -391,6 +391,17 @@ namespace std } }; + //=== VK_EXT_descriptor_heap === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorARM const & tensorARM ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( tensorARM ) ); + } + }; + //=== VK_KHR_acceleration_structure === template <> @@ -505,15 +516,6 @@ namespace std //=== VK_ARM_tensors === - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorARM const & tensorARM ) const VULKAN_HPP_NOEXCEPT - { - return std::hash{}( static_cast( tensorARM ) ); - } - }; - template <> struct hash { @@ -1497,6 +1499,33 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressRangeEXT const & deviceAddressRangeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressRangeEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressRangeEXT.size ); 
+ return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindHeapInfoEXT const & bindHeapInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindHeapInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindHeapInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindHeapInfoEXT.heapRange ); + VULKAN_HPP_HASH_COMBINE( seed, bindHeapInfoEXT.reservedRangeOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bindHeapInfoEXT.reservedRangeSize ); + return seed; + } + }; + template <> struct hash { @@ -2677,6 +2706,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceDescriptorHeapInfoEXT const & commandBufferInheritanceDescriptorHeapInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceDescriptorHeapInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceDescriptorHeapInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceDescriptorHeapInfoEXT.pSamplerHeapBindInfo ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceDescriptorHeapInfoEXT.pResourceHeapBindInfo ); + return seed; + } + }; + template <> struct hash { @@ -4213,6 +4257,160 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU ); + VULKAN_HPP_HASH_COMBINE( seed, 
samplerCreateInfo.addressModeV ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceConstantOffsetEXT const & descriptorMappingSourceConstantOffsetEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceConstantOffsetEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceConstantOffsetEXT.heapArrayStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceConstantOffsetEXT.pEmbeddedSampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceConstantOffsetEXT.samplerHeapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceConstantOffsetEXT.samplerHeapArrayStride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourcePushIndexEXT const & descriptorMappingSourcePushIndexEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.pushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.heapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, 
descriptorMappingSourcePushIndexEXT.heapArrayStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.pEmbeddedSampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.useCombinedImageSamplerIndex ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.samplerHeapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.samplerPushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.samplerHeapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourcePushIndexEXT.samplerHeapArrayStride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexEXT const & descriptorMappingSourceIndirectIndexEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.pushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.addressOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.heapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.heapArrayStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.pEmbeddedSampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.useCombinedImageSamplerIndex ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.samplerHeapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.samplerPushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.samplerAddressOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.samplerHeapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexEXT.samplerHeapArrayStride ); + 
return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexArrayEXT const & descriptorMappingSourceIndirectIndexArrayEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.pushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.addressOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.heapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.pEmbeddedSampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.useCombinedImageSamplerIndex ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.samplerHeapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.samplerPushOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.samplerAddressOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectIndexArrayEXT.samplerHeapIndexStride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceHeapDataEXT const & descriptorMappingSourceHeapDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceHeapDataEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceHeapDataEXT.pushOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectAddressEXT const & descriptorMappingSourceIndirectAddressEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectAddressEXT.pushOffset ); + 
VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceIndirectAddressEXT.addressOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceShaderRecordIndexEXT const & descriptorMappingSourceShaderRecordIndexEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.heapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.shaderRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.heapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.heapArrayStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.pEmbeddedSampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.useCombinedImageSamplerIndex ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.samplerHeapOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.samplerShaderRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.samplerHeapIndexStride ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorMappingSourceShaderRecordIndexEXT.samplerHeapArrayStride ); + return seed; + } + }; + template <> struct hash { @@ -6806,6 +7004,30 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostAddressRangeConstEXT const & hostAddressRangeConstEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hostAddressRangeConstEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, hostAddressRangeConstEXT.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostAddressRangeEXT const & hostAddressRangeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, hostAddressRangeEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, hostAddressRangeEXT.size ); + return seed; + } + }; + template <> struct hash { @@ -6980,6 +7202,38 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDescriptorInfoEXT const & imageDescriptorInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDescriptorInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDescriptorInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDescriptorInfoEXT.pView ); + VULKAN_HPP_HASH_COMBINE( seed, imageDescriptorInfoEXT.layout ); + return seed; + } + }; + template <> struct hash { @@ -7240,24 +7494,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType ); - 
VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components ); - VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange ); - return seed; - } - }; - template <> struct hash { @@ -7752,6 +7988,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutPushDataTokenNV const & indirectCommandsLayoutPushDataTokenNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutPushDataTokenNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutPushDataTokenNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutPushDataTokenNV.pushDataOffset ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutPushDataTokenNV.pushDataSize ); + return seed; + } + }; + template <> struct hash { @@ -8629,6 +8880,19 @@ namespace std }; # endif /*VK_USE_PLATFORM_OHOS*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpaqueCaptureDataCreateInfoEXT const & opaqueCaptureDataCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDataCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDataCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDataCreateInfoEXT.pData ); + return seed; + } + }; + template <> struct hash { @@ -9538,6 +9802,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & + physicalDeviceCooperativeMatrixConversionFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixConversionFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixConversionFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceCooperativeMatrixConversionFeaturesQCOM.cooperativeMatrixConversion ); + return seed; + } + }; + template <> struct hash { @@ -10060,6 +10338,69 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapFeaturesEXT const & physicalDeviceDescriptorHeapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapFeaturesEXT.descriptorHeap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapFeaturesEXT.descriptorHeapCaptureReplay ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapPropertiesEXT const & physicalDeviceDescriptorHeapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.samplerHeapAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.resourceHeapAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.maxSamplerHeapSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.maxResourceHeapSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.minSamplerHeapReservedRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.minSamplerHeapReservedRangeWithEmbedded ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.minResourceHeapReservedRange ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorHeapPropertiesEXT.samplerDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.imageDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.bufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.samplerDescriptorAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.imageDescriptorAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.bufferDescriptorAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.maxPushDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.imageCaptureReplayOpaqueDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.maxDescriptorHeapEmbeddedSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.samplerYcbcrConversionCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.sparseDescriptorHeaps ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapPropertiesEXT.protectedDescriptorHeaps ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapTensorPropertiesARM const & physicalDeviceDescriptorHeapTensorPropertiesARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapTensorPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapTensorPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapTensorPropertiesARM.tensorDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapTensorPropertiesARM.tensorDescriptorAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorHeapTensorPropertiesARM.tensorCaptureReplayOpaqueDataSize ); + return seed; + 
} + }; + template <> struct hash { @@ -11490,6 +11831,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & + physicalDeviceInternallySynchronizedQueuesFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInternallySynchronizedQueuesFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInternallySynchronizedQueuesFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInternallySynchronizedQueuesFeaturesKHR.internallySynchronizedQueues ); + return seed; + } + }; + template <> struct hash { @@ -13104,6 +13459,37 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankFeaturesNV const & physicalDevicePushConstantBankFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankFeaturesNV.pushConstantBank ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankPropertiesNV const & physicalDevicePushConstantBankPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.maxGraphicsPushConstantBanks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.maxComputePushConstantBanks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.maxGraphicsPushDataBanks ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushConstantBankPropertiesNV.maxComputePushDataBanks ); + return seed; + } + }; + template <> struct hash { @@ -14236,6 +14622,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & physicalDeviceShaderSubgroupPartitionedFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupPartitionedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupPartitionedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupPartitionedFeaturesEXT.shaderSubgroupPartitioned ); + return seed; + } + }; + template <> struct hash { @@ -16601,6 +17002,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantBankInfoNV const & pushConstantBankInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantBankInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantBankInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantBankInfoNV.bank ); + return seed; + } + }; + template <> struct hash { @@ -16618,6 +17032,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDataInfoEXT const & pushDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDataInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushDataInfoEXT.data ); + return seed; + } + }; + template <> struct hash { @@ -17599,6 +18027,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TexelBufferDescriptorInfoEXT const & texelBufferDescriptorInfoEXT ) const VULKAN_HPP_NOEXCEPT 
+ { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, texelBufferDescriptorInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, texelBufferDescriptorInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, texelBufferDescriptorInfoEXT.format ); + VULKAN_HPP_HASH_COMBINE( seed, texelBufferDescriptorInfoEXT.addressRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & tensorViewCreateInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.flags ); + VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.tensor ); + VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.format ); + return seed; + } + }; + template <> struct hash { @@ -17642,34 +18099,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeV ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable ); - 
VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor ); - VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates ); - return seed; - } - }; - template <> struct hash { @@ -17683,6 +18112,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorIndexCreateInfoEXT const & samplerCustomBorderColorIndexCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCustomBorderColorIndexCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCustomBorderColorIndexCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCustomBorderColorIndexCreateInfoEXT.index ); + return seed; + } + }; + template <> struct hash { @@ -18003,6 +18446,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderDescriptorSetAndBindingMappingInfoEXT const & shaderDescriptorSetAndBindingMappingInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderDescriptorSetAndBindingMappingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderDescriptorSetAndBindingMappingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderDescriptorSetAndBindingMappingInfoEXT.mappingCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderDescriptorSetAndBindingMappingInfoEXT.pMappings ); + return seed; + } + }; + template <> struct hash { @@ -18300,6 +18758,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubsampledImageFormatPropertiesEXT const & subsampledImageFormatPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subsampledImageFormatPropertiesEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, subsampledImageFormatPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subsampledImageFormatPropertiesEXT.subsampledImageDescriptorCount ); + return seed; + } + }; + template <> struct hash { @@ -18809,21 +19280,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & tensorViewCreateInfoARM ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.sType ); - VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.flags ); - VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.tensor ); - VULKAN_HPP_HASH_COMBINE( seed, tensorViewCreateInfoARM.format ); - return seed; - } - }; - template <> struct hash { diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index 598394d..9f18b89 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -285,6 +285,10 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + //=== VK_EXT_descriptor_heap === + vkGetPhysicalDeviceDescriptorSizeEXT = + PFN_vkGetPhysicalDeviceDescriptorSizeEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDescriptorSizeEXT" ) ); + //=== VK_EXT_sample_locations === vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); @@ -591,6 +595,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + //=== 
VK_EXT_descriptor_heap === + PFN_vkGetPhysicalDeviceDescriptorSizeEXT vkGetPhysicalDeviceDescriptorSizeEXT = 0; + //=== VK_EXT_sample_locations === PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; @@ -984,6 +991,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetDeviceCombinedImageSamplerIndexNVX = + PFN_vkGetDeviceCombinedImageSamplerIndexNVX( vkGetDeviceProcAddr( device, "vkGetDeviceCombinedImageSamplerIndexNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); @@ -1156,6 +1165,17 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + vkWriteSamplerDescriptorsEXT = PFN_vkWriteSamplerDescriptorsEXT( vkGetDeviceProcAddr( device, "vkWriteSamplerDescriptorsEXT" ) ); + vkWriteResourceDescriptorsEXT = PFN_vkWriteResourceDescriptorsEXT( vkGetDeviceProcAddr( device, "vkWriteResourceDescriptorsEXT" ) ); + vkCmdBindSamplerHeapEXT = PFN_vkCmdBindSamplerHeapEXT( vkGetDeviceProcAddr( device, "vkCmdBindSamplerHeapEXT" ) ); + vkCmdBindResourceHeapEXT = PFN_vkCmdBindResourceHeapEXT( vkGetDeviceProcAddr( device, "vkCmdBindResourceHeapEXT" ) ); + vkCmdPushDataEXT = PFN_vkCmdPushDataEXT( vkGetDeviceProcAddr( device, "vkCmdPushDataEXT" ) ); + vkGetImageOpaqueCaptureDataEXT = PFN_vkGetImageOpaqueCaptureDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDataEXT" ) ); + 
vkRegisterCustomBorderColorEXT = PFN_vkRegisterCustomBorderColorEXT( vkGetDeviceProcAddr( device, "vkRegisterCustomBorderColorEXT" ) ); + vkUnregisterCustomBorderColorEXT = PFN_vkUnregisterCustomBorderColorEXT( vkGetDeviceProcAddr( device, "vkUnregisterCustomBorderColorEXT" ) ); + vkGetTensorOpaqueCaptureDataARM = PFN_vkGetTensorOpaqueCaptureDataARM( vkGetDeviceProcAddr( device, "vkGetTensorOpaqueCaptureDataARM" ) ); + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); @@ -2226,9 +2246,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetDeviceCombinedImageSamplerIndexNVX vkGetDeviceCombinedImageSamplerIndexNVX = 0; //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; @@ -2380,6 +2401,17 @@ namespace VULKAN_HPP_NAMESPACE PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + PFN_vkWriteSamplerDescriptorsEXT vkWriteSamplerDescriptorsEXT = 0; + PFN_vkWriteResourceDescriptorsEXT vkWriteResourceDescriptorsEXT = 0; + PFN_vkCmdBindSamplerHeapEXT vkCmdBindSamplerHeapEXT = 0; + PFN_vkCmdBindResourceHeapEXT vkCmdBindResourceHeapEXT = 0; + PFN_vkCmdPushDataEXT vkCmdPushDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDataEXT vkGetImageOpaqueCaptureDataEXT = 0; + PFN_vkRegisterCustomBorderColorEXT vkRegisterCustomBorderColorEXT = 0; + PFN_vkUnregisterCustomBorderColorEXT vkUnregisterCustomBorderColorEXT = 0; + 
PFN_vkGetTensorOpaqueCaptureDataARM vkGetTensorOpaqueCaptureDataARM = 0; + //=== VK_EXT_sample_locations === PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; @@ -3050,6 +3082,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_utils === class DebugUtilsMessengerEXT; + //=== VK_EXT_descriptor_heap === + class TensorARM; + //=== VK_KHR_acceleration_structure === class AccelerationStructureKHR; @@ -4006,6 +4041,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD typename ResultValueType::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo ) const; + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkGetPhysicalDeviceDescriptorSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDescriptorSizeEXT.html + VULKAN_HPP_NODISCARD DeviceSize getDescriptorSizeEXT( DescriptorType descriptorType ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_sample_locations === // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see @@ -4678,6 +4719,10 @@ namespace VULKAN_HPP_NAMESPACE // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html VULKAN_HPP_NODISCARD uint64_t getImageViewHandle64NVX( const ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceCombinedImageSamplerIndexNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceCombinedImageSamplerIndexNVX.html + VULKAN_HPP_NODISCARD uint64_t getCombinedImageSamplerIndexNVX( uint64_t imageViewIndex, uint64_t samplerIndex ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_device_group === // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see @@ -4861,6 +4906,46 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator = nullptr ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== 
VK_EXT_descriptor_heap === + + // wrapper function for command vkWriteSamplerDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + typename ResultValueType::type writeSamplerDescriptorsEXT( ArrayProxy const & samplers, + ArrayProxy const & descriptors ) const; + + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + typename ResultValueType::type writeResourceDescriptorsEXT( ArrayProxy const & resources, + ArrayProxy const & descriptors ) const; + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageOpaqueCaptureDataEXT( ArrayProxy const & images ) const; + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + VULKAN_HPP_NODISCARD typename ResultValueType::type getImageOpaqueCaptureDataEXT( const VULKAN_HPP_NAMESPACE::Image & image ) const; + + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + VULKAN_HPP_NODISCARD typename ResultValueType::type registerCustomBorderColorEXT( const SamplerCustomBorderColorCreateInfoEXT & borderColor, + Bool32 requestIndex ) const; + + // wrapper function for command vkUnregisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnregisterCustomBorderColorEXT.html + void unregisterCustomBorderColorEXT( uint32_t index ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors ) const; + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + VULKAN_HPP_NODISCARD typename ResultValueType::type + getTensorOpaqueCaptureDataARM( const VULKAN_HPP_NAMESPACE::TensorARM & tensor ) const; + //=== VK_KHR_get_memory_requirements2 === // wrapper function for command vkGetImageMemoryRequirements2KHR, see @@ -7252,6 +7337,17 @@ namespace VULKAN_HPP_NAMESPACE void dispatchGraphIndirectCountAMDX( DeviceAddress scratch, DeviceSize scratchSize, DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + void bindSamplerHeapEXT( const BindHeapInfoEXT & bindInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + void bindResourceHeapEXT( const BindHeapInfoEXT & bindInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + void pushDataEXT( const PushDataInfoEXT & pushDataInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_sample_locations === // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html @@ -19620,6 +19716,19 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } + // wrapper function for command 
vkGetDeviceCombinedImageSamplerIndexNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceCombinedImageSamplerIndexNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getCombinedImageSamplerIndexNVX( uint64_t imageViewIndex, + uint64_t samplerIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceCombinedImageSamplerIndexNVX && + "Function requires " ); + + uint64_t result = getDispatcher()->vkGetDeviceCombinedImageSamplerIndexNVX( static_cast( m_device ), imageViewIndex, samplerIndex ); + + return result; + } + //=== VK_AMD_draw_indirect_count === // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html @@ -21634,6 +21743,187 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_descriptor_heap === + + // wrapper function for command vkWriteSamplerDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteSamplerDescriptorsEXT.html + VULKAN_HPP_INLINE typename ResultValueType::type Device::writeSamplerDescriptorsEXT( ArrayProxy const & samplers, + ArrayProxy const & descriptors ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteSamplerDescriptorsEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( samplers.size() == descriptors.size() ); +# else + if ( samplers.size() != descriptors.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::writeSamplerDescriptorsEXT: samplers.size() != descriptors.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = + static_cast( getDispatcher()->vkWriteSamplerDescriptorsEXT( static_cast( m_device ), + samplers.size(), + reinterpret_cast( samplers.data() ), + reinterpret_cast( descriptors.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::writeSamplerDescriptorsEXT" 
); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } + + // wrapper function for command vkWriteResourceDescriptorsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteResourceDescriptorsEXT.html + VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeResourceDescriptorsEXT( ArrayProxy const & resources, + ArrayProxy const & descriptors ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteResourceDescriptorsEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resources.size() == descriptors.size() ); +# else + if ( resources.size() != descriptors.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::writeResourceDescriptorsEXT: resources.size() != descriptors.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = + static_cast( getDispatcher()->vkWriteResourceDescriptorsEXT( static_cast( m_device ), + resources.size(), + reinterpret_cast( resources.data() ), + reinterpret_cast( descriptors.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::writeResourceDescriptorsEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } + + // wrapper function for command vkCmdBindSamplerHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindSamplerHeapEXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindSamplerHeapEXT( const BindHeapInfoEXT & bindInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindSamplerHeapEXT && "Function requires " ); + + getDispatcher()->vkCmdBindSamplerHeapEXT( static_cast( m_commandBuffer ), reinterpret_cast( &bindInfo ) ); + } + + // wrapper function for command vkCmdBindResourceHeapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindResourceHeapEXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindResourceHeapEXT( const BindHeapInfoEXT & bindInfo ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindResourceHeapEXT && "Function requires " ); + + getDispatcher()->vkCmdBindResourceHeapEXT( static_cast( m_commandBuffer ), reinterpret_cast( &bindInfo ) ); + } + + // wrapper function for command vkCmdPushDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDataEXT.html + VULKAN_HPP_INLINE void CommandBuffer::pushDataEXT( const PushDataInfoEXT & pushDataInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDataEXT && "Function requires " ); + + getDispatcher()->vkCmdPushDataEXT( static_cast( m_commandBuffer ), reinterpret_cast( &pushDataInfo ) ); + } + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getImageOpaqueCaptureDataEXT( ArrayProxy const & images ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDataEXT && "Function requires " ); + + std::vector datas( images.size() ); + Result result = static_cast( getDispatcher()->vkGetImageOpaqueCaptureDataEXT( static_cast( m_device ), + images.size(), + reinterpret_cast( images.data() ), + reinterpret_cast( datas.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetImageOpaqueCaptureDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDataEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageOpaqueCaptureDataEXT( const VULKAN_HPP_NAMESPACE::Image & image ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDataEXT && "Function requires " 
); + + HostAddressRangeEXT data; + Result result = static_cast( getDispatcher()->vkGetImageOpaqueCaptureDataEXT( + static_cast( m_device ), 1, reinterpret_cast( &image ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDescriptorSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDescriptorSizeEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DeviceSize PhysicalDevice::getDescriptorSizeEXT( DescriptorType descriptorType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDescriptorSizeEXT && + "Function requires " ); + + VkDeviceSize result = getDispatcher()->vkGetPhysicalDeviceDescriptorSizeEXT( static_cast( m_physicalDevice ), + static_cast( descriptorType ) ); + + return static_cast( result ); + } + + // wrapper function for command vkRegisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterCustomBorderColorEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerCustomBorderColorEXT( const SamplerCustomBorderColorCreateInfoEXT & borderColor, Bool32 requestIndex ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkRegisterCustomBorderColorEXT && "Function requires " ); + + uint32_t index; + Result result = + static_cast( getDispatcher()->vkRegisterCustomBorderColorEXT( static_cast( m_device ), + reinterpret_cast( &borderColor ), + static_cast( requestIndex ), + &index ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::registerCustomBorderColorEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( index ) ); + } + + // wrapper function for command 
vkUnregisterCustomBorderColorEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnregisterCustomBorderColorEXT.html + VULKAN_HPP_INLINE void Device::unregisterCustomBorderColorEXT( uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnregisterCustomBorderColorEXT && "Function requires " ); + + getDispatcher()->vkUnregisterCustomBorderColorEXT( static_cast( m_device ), index ); + } + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getTensorOpaqueCaptureDataARM( ArrayProxy const & tensors ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorOpaqueCaptureDataARM && "Function requires " ); + + std::vector datas( tensors.size() ); + Result result = static_cast( getDispatcher()->vkGetTensorOpaqueCaptureDataARM( static_cast( m_device ), + tensors.size(), + reinterpret_cast( tensors.data() ), + reinterpret_cast( datas.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDataARM" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( datas ) ); + } + + // wrapper function for command vkGetTensorOpaqueCaptureDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDataARM.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getTensorOpaqueCaptureDataARM( const VULKAN_HPP_NAMESPACE::TensorARM & tensor ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorOpaqueCaptureDataARM && "Function requires " ); + + HostAddressRangeEXT data; + Result result = static_cast( getDispatcher()->vkGetTensorOpaqueCaptureDataARM( + static_cast( m_device ), 1, reinterpret_cast( &tensor ), reinterpret_cast( &data ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_RAII_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDataARM" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + //=== VK_EXT_sample_locations === // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html diff --git a/third_party/vulkan/vulkan_shared.hpp b/third_party/vulkan/vulkan_shared.hpp index 8aa2f58..9fe99de 100644 --- a/third_party/vulkan/vulkan_shared.hpp +++ b/third_party/vulkan/vulkan_shared.hpp @@ -825,6 +825,17 @@ namespace VULKAN_HPP_NAMESPACE using SharedDebugUtilsMessengerEXT = SharedHandle; + //=== VK_EXT_descriptor_heap === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedTensorARM = SharedHandle; + //=== VK_KHR_acceleration_structure === template <> class SharedHandleTraits @@ -939,16 +950,6 @@ namespace VULKAN_HPP_NAMESPACE using SharedMicromapEXT = SharedHandle; //=== VK_ARM_tensors === - template <> - class SharedHandleTraits - { - public: - using DestructorType = Device; - using deleter = detail::ObjectDestroyShared; - }; - - using SharedTensorARM = SharedHandle; - template <> class SharedHandleTraits { diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 06fcfc7..87e02f8 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -3603,6 +3603,197 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "TensorARM is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "TensorARM is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostAddressRangeEXT ) == sizeof( VkHostAddressRangeEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostAddressRangeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostAddressRangeConstEXT ) == sizeof( VkHostAddressRangeConstEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostAddressRangeConstEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressRangeEXT ) == sizeof( VkDeviceAddressRangeEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceAddressRangeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TexelBufferDescriptorInfoEXT ) == sizeof( VkTexelBufferDescriptorInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TexelBufferDescriptorInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDescriptorInfoEXT ) == sizeof( VkImageDescriptorInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageDescriptorInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResourceDescriptorInfoEXT ) == sizeof( VkResourceDescriptorInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ResourceDescriptorInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResourceDescriptorDataEXT ) == sizeof( VkResourceDescriptorDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ResourceDescriptorDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindHeapInfoEXT ) == sizeof( VkBindHeapInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindHeapInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDataInfoEXT ) == sizeof( VkPushDataInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceConstantOffsetEXT ) == sizeof( VkDescriptorMappingSourceConstantOffsetEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceConstantOffsetEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourcePushIndexEXT ) == sizeof( VkDescriptorMappingSourcePushIndexEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourcePushIndexEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexEXT ) == sizeof( VkDescriptorMappingSourceIndirectIndexEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceIndirectIndexEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceHeapDataEXT ) == sizeof( VkDescriptorMappingSourceHeapDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceHeapDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectAddressEXT ) == sizeof( VkDescriptorMappingSourceIndirectAddressEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceIndirectAddressEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceShaderRecordIndexEXT ) == + sizeof( VkDescriptorMappingSourceShaderRecordIndexEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceShaderRecordIndexEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceDataEXT ) == sizeof( VkDescriptorMappingSourceDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorMappingSourceIndirectIndexArrayEXT ) == + sizeof( VkDescriptorMappingSourceIndirectIndexArrayEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorMappingSourceIndirectIndexArrayEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAndBindingMappingEXT ) == sizeof( VkDescriptorSetAndBindingMappingEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetAndBindingMappingEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderDescriptorSetAndBindingMappingInfoEXT ) == + sizeof( VkShaderDescriptorSetAndBindingMappingInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderDescriptorSetAndBindingMappingInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDataCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpaqueCaptureDataCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorHeapFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorHeapFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorHeapPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorHeapPropertiesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceDescriptorHeapInfoEXT ) == + sizeof( VkCommandBufferInheritanceDescriptorHeapInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceDescriptorHeapInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorIndexCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorIndexCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCustomBorderColorIndexCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutPushDataTokenNV ) == sizeof( VkIndirectCommandsLayoutPushDataTokenNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutPushDataTokenNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubsampledImageFormatPropertiesEXT ) == sizeof( VkSubsampledImageFormatPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubsampledImageFormatPropertiesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM ) == sizeof( VkTensorViewCreateInfoARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TensorViewCreateInfoARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorHeapTensorPropertiesARM ) == + sizeof( VkPhysicalDeviceDescriptorHeapTensorPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorHeapTensorPropertiesARM is not nothrow_move_constructible!" ); + //=== VK_AMD_mixed_attachment_samples === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), @@ -4211,6 +4402,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" ); +//=== VK_QCOM_cooperative_matrix_conversion === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM ) == + sizeof( VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM is not nothrow_move_constructible!" 
); + //=== VK_EXT_external_memory_host === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), @@ -6972,12 +7173,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "TensorCreateInfoARM is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM ) == sizeof( VkTensorViewCreateInfoARM ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "TensorViewCreateInfoARM is not nothrow_move_constructible!" ); - VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM ) == sizeof( VkTensorMemoryRequirementsInfoARM ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -7699,6 +7894,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_KHR_internally_synchronized_queues === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR ) == + sizeof( VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR is not nothrow_move_constructible!" 
); + //=== VK_NV_low_latency2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV ) == sizeof( VkLatencySleepModeInfoNV ), @@ -9141,6 +9346,28 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderFmaFeaturesKHR is not nothrow_move_constructible!" ); +//=== VK_NV_push_constant_bank === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantBankInfoNV ) == sizeof( VkPushConstantBankInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushConstantBankInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankFeaturesNV ) == sizeof( VkPhysicalDevicePushConstantBankFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePushConstantBankFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushConstantBankPropertiesNV ) == sizeof( VkPhysicalDevicePushConstantBankPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePushConstantBankPropertiesNV is not nothrow_move_constructible!" 
); + //=== VK_EXT_ray_tracing_invocation_reorder === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesEXT ) == @@ -9631,4 +9858,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceComputeOccupancyPriorityFeaturesNV is not nothrow_move_constructible!" ); +//=== VK_EXT_shader_subgroup_partitioned === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT is not nothrow_move_constructible!" ); + #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index ee6a2ca..1c9cc27 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -11841,6 +11841,270 @@ namespace VULKAN_HPP_NAMESPACE using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + // wrapper struct for struct VkDeviceAddressRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressRangeEXT.html + struct DeviceAddressRangeEXT + { + using NativeType = VkDeviceAddressRangeEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceAddressRangeEXT( DeviceAddress address_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : address{ address_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceAddressRangeEXT( DeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceAddressRangeEXT( VkDeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceAddressRangeEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceAddressRangeEXT & 
operator=( DeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceAddressRangeEXT & operator=( VkDeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT & setAddress( DeviceAddress address_ ) & VULKAN_HPP_NOEXCEPT + { + address = address_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT && setAddress( DeviceAddress address_ ) && VULKAN_HPP_NOEXCEPT + { + address = address_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT + { + size = size_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceAddressRangeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressRangeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressRangeEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceAddressRangeEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( address, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceAddressRangeEXT const & ) const = default; +#else + bool operator==( DeviceAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( address == rhs.address ) && ( size == rhs.size ); +# endif + } + + bool 
operator!=( DeviceAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + DeviceAddress address = {}; + DeviceSize size = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = DeviceAddressRangeEXT; + }; +#endif + + // wrapper struct for struct VkBindHeapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindHeapInfoEXT.html + struct BindHeapInfoEXT + { + using NativeType = VkBindHeapInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindHeapInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindHeapInfoEXT( DeviceAddressRangeEXT heapRange_ = {}, + DeviceSize reservedRangeOffset_ = {}, + DeviceSize reservedRangeSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , heapRange{ heapRange_ } + , reservedRangeOffset{ reservedRangeOffset_ } + , reservedRangeSize{ reservedRangeSize_ } + { + } + + VULKAN_HPP_CONSTEXPR BindHeapInfoEXT( BindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindHeapInfoEXT( VkBindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BindHeapInfoEXT( *reinterpret_cast( &rhs ) ) {} + + BindHeapInfoEXT & operator=( BindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindHeapInfoEXT & operator=( VkBindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this 
); + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setHeapRange( DeviceAddressRangeEXT const & heapRange_ ) & VULKAN_HPP_NOEXCEPT + { + heapRange = heapRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setHeapRange( DeviceAddressRangeEXT const & heapRange_ ) && VULKAN_HPP_NOEXCEPT + { + heapRange = heapRange_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setReservedRangeOffset( DeviceSize reservedRangeOffset_ ) & VULKAN_HPP_NOEXCEPT + { + reservedRangeOffset = reservedRangeOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setReservedRangeOffset( DeviceSize reservedRangeOffset_ ) && VULKAN_HPP_NOEXCEPT + { + reservedRangeOffset = reservedRangeOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setReservedRangeSize( DeviceSize reservedRangeSize_ ) & VULKAN_HPP_NOEXCEPT + { + reservedRangeSize = reservedRangeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setReservedRangeSize( DeviceSize reservedRangeSize_ ) && VULKAN_HPP_NOEXCEPT + { + reservedRangeSize = reservedRangeSize_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindHeapInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindHeapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindHeapInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindHeapInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, heapRange, reservedRangeOffset, reservedRangeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindHeapInfoEXT const & ) const = default; +#else + bool operator==( BindHeapInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapRange == rhs.heapRange ) && ( reservedRangeOffset == rhs.reservedRangeOffset ) && + ( reservedRangeSize == rhs.reservedRangeSize ); +# endif + } + + bool operator!=( BindHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eBindHeapInfoEXT; + const void * pNext = {}; + DeviceAddressRangeEXT heapRange = {}; + DeviceSize reservedRangeOffset = {}; + DeviceSize reservedRangeSize = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = BindHeapInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = BindHeapInfoEXT; + }; + // wrapper struct for struct VkOffset2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset2D.html struct Offset2D { @@ -24812,6 +25076,152 @@ namespace VULKAN_HPP_NAMESPACE using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; }; + // wrapper struct for struct VkCommandBufferInheritanceDescriptorHeapInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceDescriptorHeapInfoEXT.html + struct CommandBufferInheritanceDescriptorHeapInfoEXT + { + using NativeType = VkCommandBufferInheritanceDescriptorHeapInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceDescriptorHeapInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceDescriptorHeapInfoEXT( const BindHeapInfoEXT * pSamplerHeapBindInfo_ = {}, + const BindHeapInfoEXT * pResourceHeapBindInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pSamplerHeapBindInfo{ 
pSamplerHeapBindInfo_ } + , pResourceHeapBindInfo{ pResourceHeapBindInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceDescriptorHeapInfoEXT( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceDescriptorHeapInfoEXT( VkCommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceDescriptorHeapInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceDescriptorHeapInfoEXT & operator=( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceDescriptorHeapInfoEXT & operator=( VkCommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & setPSamplerHeapBindInfo( const BindHeapInfoEXT * pSamplerHeapBindInfo_ ) & + VULKAN_HPP_NOEXCEPT + { + pSamplerHeapBindInfo = pSamplerHeapBindInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPSamplerHeapBindInfo( const BindHeapInfoEXT * pSamplerHeapBindInfo_ ) && + VULKAN_HPP_NOEXCEPT + { + pSamplerHeapBindInfo = pSamplerHeapBindInfo_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & setPResourceHeapBindInfo( const BindHeapInfoEXT * pResourceHeapBindInfo_ ) & + VULKAN_HPP_NOEXCEPT + { + 
pResourceHeapBindInfo = pResourceHeapBindInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPResourceHeapBindInfo( const BindHeapInfoEXT * pResourceHeapBindInfo_ ) && + VULKAN_HPP_NOEXCEPT + { + pResourceHeapBindInfo = pResourceHeapBindInfo_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceDescriptorHeapInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceDescriptorHeapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceDescriptorHeapInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceDescriptorHeapInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pSamplerHeapBindInfo, pResourceHeapBindInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceDescriptorHeapInfoEXT const & ) const = default; +#else + bool operator==( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSamplerHeapBindInfo == rhs.pSamplerHeapBindInfo ) && + ( pResourceHeapBindInfo == rhs.pResourceHeapBindInfo ); +# endif + } + + bool operator!=( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eCommandBufferInheritanceDescriptorHeapInfoEXT; + const void * pNext = {}; + const BindHeapInfoEXT * pSamplerHeapBindInfo = {}; + const BindHeapInfoEXT * 
pResourceHeapBindInfo = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = CommandBufferInheritanceDescriptorHeapInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = CommandBufferInheritanceDescriptorHeapInfoEXT; + }; + // wrapper struct for struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html struct CommandBufferInheritanceRenderPassTransformInfoQCOM @@ -42259,6 +42669,2074 @@ namespace VULKAN_HPP_NAMESPACE using Type = DescriptorGetTensorInfoARM; }; + // wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html + struct SamplerCreateInfo + { + using NativeType = VkSamplerCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateFlags flags_ = {}, + Filter magFilter_ = Filter::eNearest, + Filter minFilter_ = Filter::eNearest, + SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, + SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, + SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, + SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + Bool32 compareEnable_ = {}, + CompareOp compareOp_ = CompareOp::eNever, + float minLod_ = {}, + float maxLod_ = {}, + BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, + Bool32 unnormalizedCoordinates_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , magFilter{ magFilter_ } + , minFilter{ minFilter_ } + , 
mipmapMode{ mipmapMode_ } + , addressModeU{ addressModeU_ } + , addressModeV{ addressModeV_ } + , addressModeW{ addressModeW_ } + , mipLodBias{ mipLodBias_ } + , anisotropyEnable{ anisotropyEnable_ } + , maxAnisotropy{ maxAnisotropy_ } + , compareEnable{ compareEnable_ } + , compareOp{ compareOp_ } + , minLod{ minLod_ } + , maxLod{ maxLod_ } + , borderColor{ borderColor_ } + , unnormalizedCoordinates{ unnormalizedCoordinates_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) {} + + SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( SamplerCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setFlags( SamplerCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( Filter magFilter_ ) & VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMagFilter( Filter magFilter_ ) && VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return std::move( *this ); + } + + 
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( Filter minFilter_ ) & VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMinFilter( Filter minFilter_ ) && VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( SamplerMipmapMode mipmapMode_ ) & VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMipmapMode( SamplerMipmapMode mipmapMode_ ) && VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( SamplerAddressMode addressModeU_ ) & VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeU( SamplerAddressMode addressModeU_ ) && VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( SamplerAddressMode addressModeV_ ) & VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeV( SamplerAddressMode addressModeV_ ) && VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( SamplerAddressMode addressModeW_ ) & VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeW( SamplerAddressMode addressModeW_ ) && VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) & VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && 
setMipLodBias( float mipLodBias_ ) && VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( Bool32 anisotropyEnable_ ) & VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAnisotropyEnable( Bool32 anisotropyEnable_ ) && VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) & VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxAnisotropy( float maxAnisotropy_ ) && VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( Bool32 compareEnable_ ) & VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareEnable( Bool32 compareEnable_ ) && VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( CompareOp compareOp_ ) & VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareOp( CompareOp compareOp_ ) && VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) & VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMinLod( float minLod_ ) && VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) & VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxLod( float maxLod_ ) && VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( BorderColor borderColor_ ) & VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setBorderColor( BorderColor borderColor_ ) && VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) & VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) && VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + magFilter, + minFilter, + mipmapMode, + addressModeU, + addressModeV, + addressModeW, + mipLodBias, + anisotropyEnable, + maxAnisotropy, + compareEnable, + compareOp, + minLod, + maxLod, + borderColor, + unnormalizedCoordinates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; +#else + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && + ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && + ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); +# endif + } + + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + SamplerCreateFlags flags = {}; + Filter magFilter = Filter::eNearest; + Filter minFilter = Filter::eNearest; + SamplerMipmapMode mipmapMode = SamplerMipmapMode::eNearest; + SamplerAddressMode addressModeU = SamplerAddressMode::eRepeat; + SamplerAddressMode addressModeV = SamplerAddressMode::eRepeat; + SamplerAddressMode addressModeW = SamplerAddressMode::eRepeat; + float mipLodBias = {}; + Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + Bool32 compareEnable = {}; + CompareOp compareOp = CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + BorderColor borderColor = BorderColor::eFloatTransparentBlack; + Bool32 unnormalizedCoordinates = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; +#endif + + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; + + // wrapper struct for struct VkDescriptorMappingSourceConstantOffsetEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceConstantOffsetEXT.html + struct DescriptorMappingSourceConstantOffsetEXT + { + using NativeType = VkDescriptorMappingSourceConstantOffsetEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorMappingSourceConstantOffsetEXT( uint32_t heapOffset_ = {}, + uint32_t heapArrayStride_ = {}, + const SamplerCreateInfo * pEmbeddedSampler_ = {}, + uint32_t samplerHeapOffset_ = {}, + uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT + : heapOffset{ heapOffset_ } + , heapArrayStride{ heapArrayStride_ } + , pEmbeddedSampler{ pEmbeddedSampler_ } + , samplerHeapOffset{ samplerHeapOffset_ } + , samplerHeapArrayStride{ samplerHeapArrayStride_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorMappingSourceConstantOffsetEXT( DescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorMappingSourceConstantOffsetEXT( VkDescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorMappingSourceConstantOffsetEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorMappingSourceConstantOffsetEXT & operator=( DescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorMappingSourceConstantOffsetEXT & operator=( VkDescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT + { + heapOffset = heapOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT + { + heapOffset = heapOffset_; + return 
std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT + { + heapArrayStride = heapArrayStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT + { + heapArrayStride = heapArrayStride_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT + { + pEmbeddedSampler = pEmbeddedSampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && + VULKAN_HPP_NOEXCEPT + { + pEmbeddedSampler = pEmbeddedSampler_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT + { + samplerHeapOffset = samplerHeapOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT + { + samplerHeapOffset = samplerHeapOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT + { + samplerHeapArrayStride = samplerHeapArrayStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT + { + samplerHeapArrayStride = samplerHeapArrayStride_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorMappingSourceConstantOffsetEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkDescriptorMappingSourceConstantOffsetEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourceConstantOffsetEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceConstantOffsetEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourceConstantOffsetEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceConstantOffsetEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourceConstantOffsetEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &, uint32_t const &, const SamplerCreateInfo * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( heapOffset, heapArrayStride, pEmbeddedSampler, samplerHeapOffset, samplerHeapArrayStride );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourceConstantOffsetEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourceConstantOffsetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( heapOffset == rhs.heapOffset ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) &&
+             ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourceConstantOffsetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                  heapOffset             = {};
+    uint32_t                  heapArrayStride        = {};
+    const SamplerCreateInfo * pEmbeddedSampler       = {};
+    uint32_t                  samplerHeapOffset      = {};
+    uint32_t                  samplerHeapArrayStride = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourceConstantOffsetEXT>
+  {
+    using Type = DescriptorMappingSourceConstantOffsetEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourcePushIndexEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourcePushIndexEXT.html
+  struct DescriptorMappingSourcePushIndexEXT
+  {
+    using NativeType = VkDescriptorMappingSourcePushIndexEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourcePushIndexEXT( uint32_t                  heapOffset_                   = {},
+                                                              uint32_t                  pushOffset_                   = {},
+                                                              uint32_t                  heapIndexStride_              = {},
+                                                              uint32_t                  heapArrayStride_              = {},
+                                                              const SamplerCreateInfo * pEmbeddedSampler_             = {},
+                                                              Bool32                    useCombinedImageSamplerIndex_ = {},
+                                                              uint32_t                  samplerHeapOffset_            = {},
+                                                              uint32_t                  samplerPushOffset_            = {},
+                                                              uint32_t                  samplerHeapIndexStride_       = {},
+                                                              uint32_t                  samplerHeapArrayStride_       = {} ) VULKAN_HPP_NOEXCEPT
+      : heapOffset{ heapOffset_ }
+      , pushOffset{ pushOffset_ }
+      , heapIndexStride{ heapIndexStride_ }
+      , heapArrayStride{ heapArrayStride_ }
+      , pEmbeddedSampler{ pEmbeddedSampler_ }
+      , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ }
+      , samplerHeapOffset{ samplerHeapOffset_ }
+      , samplerPushOffset{ samplerPushOffset_ }
+      , samplerHeapIndexStride{ samplerHeapIndexStride_ }
+      , samplerHeapArrayStride{ samplerHeapArrayStride_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourcePushIndexEXT( DescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorMappingSourcePushIndexEXT( VkDescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorMappingSourcePushIndexEXT( *reinterpret_cast<DescriptorMappingSourcePushIndexEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorMappingSourcePushIndexEXT & operator=( DescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DescriptorMappingSourcePushIndexEXT & operator=( VkDescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DescriptorMappingSourcePushIndexEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapArrayStride = heapArrayStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapArrayStride = heapArrayStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapArrayStride = samplerHeapArrayStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapArrayStride = samplerHeapArrayStride_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDescriptorMappingSourcePushIndexEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorMappingSourcePushIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourcePushIndexEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourcePushIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourcePushIndexEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourcePushIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourcePushIndexEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourcePushIndexEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               const SamplerCreateInfo * const &,
+               Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( heapOffset,
+                       pushOffset,
+                       heapIndexStride,
+                       heapArrayStride,
+                       pEmbeddedSampler,
+                       useCombinedImageSamplerIndex,
+                       samplerHeapOffset,
+                       samplerPushOffset,
+                       samplerHeapIndexStride,
+                       samplerHeapArrayStride );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourcePushIndexEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourcePushIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( heapIndexStride == rhs.heapIndexStride ) &&
+             ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) &&
+             ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) &&
+             ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) &&
+             ( samplerHeapArrayStride == rhs.samplerHeapArrayStride );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourcePushIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                  heapOffset                   = {};
+    uint32_t                  pushOffset                   = {};
+    uint32_t                  heapIndexStride              = {};
+    uint32_t                  heapArrayStride              = {};
+    const SamplerCreateInfo * pEmbeddedSampler             = {};
+    Bool32                    useCombinedImageSamplerIndex = {};
+    uint32_t                  samplerHeapOffset            = {};
+    uint32_t                  samplerPushOffset            = {};
+    uint32_t                  samplerHeapIndexStride       = {};
+    uint32_t                  samplerHeapArrayStride       = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourcePushIndexEXT>
+  {
+    using Type = DescriptorMappingSourcePushIndexEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourceIndirectIndexEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectIndexEXT.html
+  struct DescriptorMappingSourceIndirectIndexEXT
+  {
+    using NativeType = VkDescriptorMappingSourceIndirectIndexEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexEXT( uint32_t                  heapOffset_                   = {},
+                                                                  uint32_t                  pushOffset_                   = {},
+                                                                  uint32_t                  addressOffset_                = {},
+                                                                  uint32_t                  heapIndexStride_              = {},
+                                                                  uint32_t                  heapArrayStride_              = {},
+                                                                  const SamplerCreateInfo * pEmbeddedSampler_             = {},
+                                                                  Bool32                    useCombinedImageSamplerIndex_ = {},
+                                                                  uint32_t                  samplerHeapOffset_            = {},
+                                                                  uint32_t                  samplerPushOffset_            = {},
+                                                                  uint32_t                  samplerAddressOffset_         = {},
+                                                                  uint32_t                  samplerHeapIndexStride_       = {},
+                                                                  uint32_t                  samplerHeapArrayStride_       = {} ) VULKAN_HPP_NOEXCEPT
+      : heapOffset{ heapOffset_ }
+      , pushOffset{ pushOffset_ }
+      , addressOffset{ addressOffset_ }
+      , heapIndexStride{ heapIndexStride_ }
+      , heapArrayStride{ heapArrayStride_ }
+      , pEmbeddedSampler{ pEmbeddedSampler_ }
+      , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ }
+      , samplerHeapOffset{ samplerHeapOffset_ }
+      , samplerPushOffset{ samplerPushOffset_ }
+      , samplerAddressOffset{ samplerAddressOffset_ }
+      , samplerHeapIndexStride{ samplerHeapIndexStride_ }
+      , samplerHeapArrayStride{ samplerHeapArrayStride_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexEXT( DescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorMappingSourceIndirectIndexEXT( VkDescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorMappingSourceIndirectIndexEXT( *reinterpret_cast<DescriptorMappingSourceIndirectIndexEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorMappingSourceIndirectIndexEXT & operator=( DescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DescriptorMappingSourceIndirectIndexEXT & operator=( VkDescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DescriptorMappingSourceIndirectIndexEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapArrayStride = heapArrayStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapArrayStride = heapArrayStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) &
+      VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerAddressOffset = samplerAddressOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerAddressOffset = samplerAddressOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapArrayStride = samplerHeapArrayStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapArrayStride = samplerHeapArrayStride_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDescriptorMappingSourceIndirectIndexEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorMappingSourceIndirectIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourceIndirectIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourceIndirectIndexEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourceIndirectIndexEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               const SamplerCreateInfo * const &,
+               Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( heapOffset,
+                       pushOffset,
+                       addressOffset,
+                       heapIndexStride,
+                       heapArrayStride,
+                       pEmbeddedSampler,
+                       useCombinedImageSamplerIndex,
+                       samplerHeapOffset,
+                       samplerPushOffset,
+                       samplerAddressOffset,
+                       samplerHeapIndexStride,
+                       samplerHeapArrayStride );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourceIndirectIndexEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourceIndirectIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset ) &&
+             ( heapIndexStride == rhs.heapIndexStride ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) &&
+             ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) &&
+             ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerAddressOffset == rhs.samplerAddressOffset ) &&
+             ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourceIndirectIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                  heapOffset                   = {};
+    uint32_t                  pushOffset                   = {};
+    uint32_t                  addressOffset                = {};
+    uint32_t                  heapIndexStride              = {};
+    uint32_t                  heapArrayStride              = {};
+    const SamplerCreateInfo * pEmbeddedSampler             = {};
+    Bool32                    useCombinedImageSamplerIndex = {};
+    uint32_t                  samplerHeapOffset            = {};
+    uint32_t                  samplerPushOffset            = {};
+    uint32_t                  samplerAddressOffset         = {};
+    uint32_t                  samplerHeapIndexStride       = {};
+    uint32_t                  samplerHeapArrayStride       = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourceIndirectIndexEXT>
+  {
+    using Type = DescriptorMappingSourceIndirectIndexEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourceIndirectIndexArrayEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectIndexArrayEXT.html
+  struct DescriptorMappingSourceIndirectIndexArrayEXT
+  {
+    using NativeType = VkDescriptorMappingSourceIndirectIndexArrayEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexArrayEXT( uint32_t                  heapOffset_                   = {},
+                                                                       uint32_t                  pushOffset_                   = {},
+                                                                       uint32_t                  addressOffset_                = {},
+                                                                       uint32_t                  heapIndexStride_              = {},
+                                                                       const SamplerCreateInfo * pEmbeddedSampler_             = {},
+                                                                       Bool32                    useCombinedImageSamplerIndex_ = {},
+                                                                       uint32_t                  samplerHeapOffset_            = {},
+                                                                       uint32_t                  samplerPushOffset_            = {},
+                                                                       uint32_t                  samplerAddressOffset_         = {},
+                                                                       uint32_t                  samplerHeapIndexStride_       = {} ) VULKAN_HPP_NOEXCEPT
+      : heapOffset{ heapOffset_ }
+      , pushOffset{ pushOffset_ }
+      , addressOffset{ addressOffset_ }
+      , heapIndexStride{ heapIndexStride_ }
+      , pEmbeddedSampler{ pEmbeddedSampler_ }
+      , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ }
+      , samplerHeapOffset{ samplerHeapOffset_ }
+      , samplerPushOffset{ samplerPushOffset_ }
+      , samplerAddressOffset{ samplerAddressOffset_ }
+      , samplerHeapIndexStride{ samplerHeapIndexStride_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexArrayEXT( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorMappingSourceIndirectIndexArrayEXT( VkDescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorMappingSourceIndirectIndexArrayEXT( *reinterpret_cast<DescriptorMappingSourceIndirectIndexArrayEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorMappingSourceIndirectIndexArrayEXT & operator=( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DescriptorMappingSourceIndirectIndexArrayEXT & operator=( VkDescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DescriptorMappingSourceIndirectIndexArrayEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapIndexStride = heapIndexStride_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) &
+      VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      pEmbeddedSampler = pEmbeddedSampler_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) &
+      VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapOffset = samplerHeapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerPushOffset = samplerPushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerAddressOffset = samplerAddressOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerAddressOffset = samplerAddressOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      samplerHeapIndexStride = samplerHeapIndexStride_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDescriptorMappingSourceIndirectIndexArrayEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorMappingSourceIndirectIndexArrayEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexArrayEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourceIndirectIndexArrayEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexArrayEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourceIndirectIndexArrayEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectIndexArrayEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourceIndirectIndexArrayEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               const SamplerCreateInfo * const &,
+               Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( heapOffset,
+                       pushOffset,
+                       addressOffset,
+                       heapIndexStride,
+                       pEmbeddedSampler,
+                       useCombinedImageSamplerIndex,
+                       samplerHeapOffset,
+                       samplerPushOffset,
+                       samplerAddressOffset,
+                       samplerHeapIndexStride );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourceIndirectIndexArrayEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset ) &&
+             ( heapIndexStride == rhs.heapIndexStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) &&
+             ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) &&
+             ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerAddressOffset == rhs.samplerAddressOffset ) &&
+             ( samplerHeapIndexStride == rhs.samplerHeapIndexStride );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                  heapOffset                   = {};
+    uint32_t                  pushOffset                   = {};
+    uint32_t                  addressOffset                = {};
+    uint32_t                  heapIndexStride              = {};
+    const SamplerCreateInfo * pEmbeddedSampler             = {};
+    Bool32                    useCombinedImageSamplerIndex = {};
+    uint32_t                  samplerHeapOffset            = {};
+    uint32_t                  samplerPushOffset            = {};
+    uint32_t                  samplerAddressOffset         = {};
+    uint32_t                  samplerHeapIndexStride       = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourceIndirectIndexArrayEXT>
+  {
+    using Type = DescriptorMappingSourceIndirectIndexArrayEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourceHeapDataEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceHeapDataEXT.html
+  struct DescriptorMappingSourceHeapDataEXT
+  {
+    using NativeType = VkDescriptorMappingSourceHeapDataEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceHeapDataEXT( uint32_t heapOffset_ = {}, uint32_t pushOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : heapOffset{ heapOffset_ }
+      , pushOffset{ pushOffset_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceHeapDataEXT( DescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorMappingSourceHeapDataEXT( VkDescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorMappingSourceHeapDataEXT( *reinterpret_cast<DescriptorMappingSourceHeapDataEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorMappingSourceHeapDataEXT & operator=( DescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DescriptorMappingSourceHeapDataEXT & operator=( VkDescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DescriptorMappingSourceHeapDataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      heapOffset = heapOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDescriptorMappingSourceHeapDataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorMappingSourceHeapDataEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceHeapDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourceHeapDataEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceHeapDataEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourceHeapDataEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceHeapDataEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourceHeapDataEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( heapOffset, pushOffset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourceHeapDataEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourceHeapDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourceHeapDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t heapOffset = {};
+    uint32_t pushOffset = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourceHeapDataEXT>
+  {
+    using Type = DescriptorMappingSourceHeapDataEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourceIndirectAddressEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectAddressEXT.html
+  struct DescriptorMappingSourceIndirectAddressEXT
+  {
+    using NativeType = VkDescriptorMappingSourceIndirectAddressEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectAddressEXT( uint32_t pushOffset_ = {}, uint32_t addressOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pushOffset{ pushOffset_ }
+      , addressOffset{ addressOffset_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectAddressEXT( DescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorMappingSourceIndirectAddressEXT( VkDescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorMappingSourceIndirectAddressEXT( *reinterpret_cast<DescriptorMappingSourceIndirectAddressEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorMappingSourceIndirectAddressEXT & operator=( DescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DescriptorMappingSourceIndirectAddressEXT & operator=( VkDescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DescriptorMappingSourceIndirectAddressEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushOffset = pushOffset_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      addressOffset = addressOffset_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDescriptorMappingSourceIndirectAddressEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorMappingSourceIndirectAddressEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectAddressEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorMappingSourceIndirectAddressEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectAddressEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDescriptorMappingSourceIndirectAddressEXT *>( this );
+    }
+
+    operator VkDescriptorMappingSourceIndirectAddressEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDescriptorMappingSourceIndirectAddressEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( pushOffset, addressOffset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorMappingSourceIndirectAddressEXT const & ) const = default;
+#else
+    bool operator==( DescriptorMappingSourceIndirectAddressEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset );
+# endif
+    }
+
+    bool operator!=( DescriptorMappingSourceIndirectAddressEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t pushOffset    = {};
+    uint32_t addressOffset = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkDescriptorMappingSourceIndirectAddressEXT>
+  {
+    using Type = DescriptorMappingSourceIndirectAddressEXT;
+  };
+#endif
+
+  // wrapper struct for struct VkDescriptorMappingSourceShaderRecordIndexEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceShaderRecordIndexEXT.html
+  struct DescriptorMappingSourceShaderRecordIndexEXT
+  {
+    using NativeType = VkDescriptorMappingSourceShaderRecordIndexEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorMappingSourceShaderRecordIndexEXT( uint32_t                  heapOffset_                   = {},
+                                                                      uint32_t                  shaderRecordOffset_           = {},
+                                                                      uint32_t                  heapIndexStride_              = {},
+                                                                      uint32_t                  heapArrayStride_              = {},
+                                                                      const SamplerCreateInfo * pEmbeddedSampler_             = {},
+                                                                      Bool32                    useCombinedImageSamplerIndex_ = {},
+                                                                      uint32_t                  samplerHeapOffset_            = {},
+                                                                      uint32_t
samplerShaderRecordOffset_ = {}, + uint32_t samplerHeapIndexStride_ = {}, + uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT + : heapOffset{ heapOffset_ } + , shaderRecordOffset{ shaderRecordOffset_ } + , heapIndexStride{ heapIndexStride_ } + , heapArrayStride{ heapArrayStride_ } + , pEmbeddedSampler{ pEmbeddedSampler_ } + , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ } + , samplerHeapOffset{ samplerHeapOffset_ } + , samplerShaderRecordOffset{ samplerShaderRecordOffset_ } + , samplerHeapIndexStride{ samplerHeapIndexStride_ } + , samplerHeapArrayStride{ samplerHeapArrayStride_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorMappingSourceShaderRecordIndexEXT( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorMappingSourceShaderRecordIndexEXT( VkDescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorMappingSourceShaderRecordIndexEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorMappingSourceShaderRecordIndexEXT & operator=( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorMappingSourceShaderRecordIndexEXT & operator=( VkDescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT + { + heapOffset = heapOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT + { + heapOffset = heapOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setShaderRecordOffset( uint32_t shaderRecordOffset_ ) & 
VULKAN_HPP_NOEXCEPT + { + shaderRecordOffset = shaderRecordOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setShaderRecordOffset( uint32_t shaderRecordOffset_ ) && VULKAN_HPP_NOEXCEPT + { + shaderRecordOffset = shaderRecordOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT + { + heapIndexStride = heapIndexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT + { + heapIndexStride = heapIndexStride_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT + { + heapArrayStride = heapArrayStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT + { + heapArrayStride = heapArrayStride_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & + VULKAN_HPP_NOEXCEPT + { + pEmbeddedSampler = pEmbeddedSampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && + VULKAN_HPP_NOEXCEPT + { + pEmbeddedSampler = pEmbeddedSampler_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & + VULKAN_HPP_NOEXCEPT + { + useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && 
setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) && + VULKAN_HPP_NOEXCEPT + { + useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT + { + samplerHeapOffset = samplerHeapOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT + { + samplerHeapOffset = samplerHeapOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerShaderRecordOffset( uint32_t samplerShaderRecordOffset_ ) & + VULKAN_HPP_NOEXCEPT + { + samplerShaderRecordOffset = samplerShaderRecordOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerShaderRecordOffset( uint32_t samplerShaderRecordOffset_ ) && + VULKAN_HPP_NOEXCEPT + { + samplerShaderRecordOffset = samplerShaderRecordOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT + { + samplerHeapIndexStride = samplerHeapIndexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT + { + samplerHeapIndexStride = samplerHeapIndexStride_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT + { + samplerHeapArrayStride = samplerHeapArrayStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapArrayStride( uint32_t 
samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT + { + samplerHeapArrayStride = samplerHeapArrayStride_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorMappingSourceShaderRecordIndexEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorMappingSourceShaderRecordIndexEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorMappingSourceShaderRecordIndexEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorMappingSourceShaderRecordIndexEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( heapOffset, + shaderRecordOffset, + heapIndexStride, + heapArrayStride, + pEmbeddedSampler, + useCombinedImageSamplerIndex, + samplerHeapOffset, + samplerShaderRecordOffset, + samplerHeapIndexStride, + samplerHeapArrayStride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorMappingSourceShaderRecordIndexEXT const & ) const = default; +#else + bool operator==( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( heapOffset == rhs.heapOffset ) && ( shaderRecordOffset == rhs.shaderRecordOffset ) && ( heapIndexStride == rhs.heapIndexStride ) && + ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && + ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && + ( samplerShaderRecordOffset == rhs.samplerShaderRecordOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) && + ( samplerHeapArrayStride == rhs.samplerHeapArrayStride ); +# endif + } + + bool operator!=( 
DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t heapOffset = {}; + uint32_t shaderRecordOffset = {}; + uint32_t heapIndexStride = {}; + uint32_t heapArrayStride = {}; + const SamplerCreateInfo * pEmbeddedSampler = {}; + Bool32 useCombinedImageSamplerIndex = {}; + uint32_t samplerHeapOffset = {}; + uint32_t samplerShaderRecordOffset = {}; + uint32_t samplerHeapIndexStride = {}; + uint32_t samplerHeapArrayStride = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = DescriptorMappingSourceShaderRecordIndexEXT; + }; +#endif + + union DescriptorMappingSourceDataEXT + { + using NativeType = VkDescriptorMappingSourceDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceConstantOffsetEXT constantOffset_ = {} ) : constantOffset( constantOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourcePushIndexEXT pushIndex_ ) : pushIndex( pushIndex_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectIndexEXT indirectIndex_ ) : indirectIndex( indirectIndex_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray_ ) + : indirectIndexArray( indirectIndexArray_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceHeapDataEXT heapData_ ) : heapData( heapData_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( uint32_t uint32_t_ ) : pushDataOffset( uint32_t_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectAddressEXT indirectAddress_ ) : indirectAddress( indirectAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( 
DescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex_ ) + : shaderRecordIndex( shaderRecordIndex_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setConstantOffset( DescriptorMappingSourceConstantOffsetEXT const & constantOffset_ ) & + VULKAN_HPP_NOEXCEPT + { + constantOffset = constantOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setConstantOffset( DescriptorMappingSourceConstantOffsetEXT const & constantOffset_ ) && + VULKAN_HPP_NOEXCEPT + { + constantOffset = constantOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushIndex( DescriptorMappingSourcePushIndexEXT const & pushIndex_ ) & VULKAN_HPP_NOEXCEPT + { + pushIndex = pushIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushIndex( DescriptorMappingSourcePushIndexEXT const & pushIndex_ ) && VULKAN_HPP_NOEXCEPT + { + pushIndex = pushIndex_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectIndex( DescriptorMappingSourceIndirectIndexEXT const & indirectIndex_ ) & + VULKAN_HPP_NOEXCEPT + { + indirectIndex = indirectIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setIndirectIndex( DescriptorMappingSourceIndirectIndexEXT const & indirectIndex_ ) && + VULKAN_HPP_NOEXCEPT + { + indirectIndex = indirectIndex_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectIndexArray( DescriptorMappingSourceIndirectIndexArrayEXT const & indirectIndexArray_ ) & + VULKAN_HPP_NOEXCEPT + { + indirectIndexArray = indirectIndexArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && + setIndirectIndexArray( DescriptorMappingSourceIndirectIndexArrayEXT const & 
indirectIndexArray_ ) && + VULKAN_HPP_NOEXCEPT + { + indirectIndexArray = indirectIndexArray_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setHeapData( DescriptorMappingSourceHeapDataEXT const & heapData_ ) & VULKAN_HPP_NOEXCEPT + { + heapData = heapData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setHeapData( DescriptorMappingSourceHeapDataEXT const & heapData_ ) && VULKAN_HPP_NOEXCEPT + { + heapData = heapData_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushDataOffset( uint32_t pushDataOffset_ ) & VULKAN_HPP_NOEXCEPT + { + pushDataOffset = pushDataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushDataOffset( uint32_t pushDataOffset_ ) && VULKAN_HPP_NOEXCEPT + { + pushDataOffset = pushDataOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushAddressOffset( uint32_t pushAddressOffset_ ) & VULKAN_HPP_NOEXCEPT + { + pushAddressOffset = pushAddressOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushAddressOffset( uint32_t pushAddressOffset_ ) && VULKAN_HPP_NOEXCEPT + { + pushAddressOffset = pushAddressOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectAddress( DescriptorMappingSourceIndirectAddressEXT const & indirectAddress_ ) & + VULKAN_HPP_NOEXCEPT + { + indirectAddress = indirectAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setIndirectAddress( DescriptorMappingSourceIndirectAddressEXT const & indirectAddress_ ) && + VULKAN_HPP_NOEXCEPT + { + indirectAddress = indirectAddress_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordIndex( DescriptorMappingSourceShaderRecordIndexEXT const & shaderRecordIndex_ ) & + 
VULKAN_HPP_NOEXCEPT + { + shaderRecordIndex = shaderRecordIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordIndex( DescriptorMappingSourceShaderRecordIndexEXT const & shaderRecordIndex_ ) && + VULKAN_HPP_NOEXCEPT + { + shaderRecordIndex = shaderRecordIndex_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordDataOffset( uint32_t shaderRecordDataOffset_ ) & VULKAN_HPP_NOEXCEPT + { + shaderRecordDataOffset = shaderRecordDataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordDataOffset( uint32_t shaderRecordDataOffset_ ) && VULKAN_HPP_NOEXCEPT + { + shaderRecordDataOffset = shaderRecordDataOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordAddressOffset( uint32_t shaderRecordAddressOffset_ ) & VULKAN_HPP_NOEXCEPT + { + shaderRecordAddressOffset = shaderRecordAddressOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordAddressOffset( uint32_t shaderRecordAddressOffset_ ) && VULKAN_HPP_NOEXCEPT + { + shaderRecordAddressOffset = shaderRecordAddressOffset_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorMappingSourceDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorMappingSourceDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + DescriptorMappingSourceConstantOffsetEXT constantOffset; + DescriptorMappingSourcePushIndexEXT pushIndex; + DescriptorMappingSourceIndirectIndexEXT indirectIndex; + DescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray; + DescriptorMappingSourceHeapDataEXT heapData; + uint32_t pushDataOffset; + uint32_t pushAddressOffset; + DescriptorMappingSourceIndirectAddressEXT indirectAddress; + 
DescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex; + uint32_t shaderRecordDataOffset; + uint32_t shaderRecordAddressOffset; +#else + VkDescriptorMappingSourceConstantOffsetEXT constantOffset; + VkDescriptorMappingSourcePushIndexEXT pushIndex; + VkDescriptorMappingSourceIndirectIndexEXT indirectIndex; + VkDescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray; + VkDescriptorMappingSourceHeapDataEXT heapData; + uint32_t pushDataOffset; + uint32_t pushAddressOffset; + VkDescriptorMappingSourceIndirectAddressEXT indirectAddress; + VkDescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex; + uint32_t shaderRecordDataOffset; + uint32_t shaderRecordAddressOffset; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = DescriptorMappingSourceDataEXT; + }; +#endif + // wrapper struct for struct VkDescriptorPoolSize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolSize.html struct DescriptorPoolSize { @@ -42863,6 +45341,195 @@ namespace VULKAN_HPP_NAMESPACE using Type = DescriptorSetAllocateInfo; }; + // wrapper struct for struct VkDescriptorSetAndBindingMappingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAndBindingMappingEXT.html + struct DescriptorSetAndBindingMappingEXT + { + using NativeType = VkDescriptorSetAndBindingMappingEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAndBindingMappingEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT( uint32_t descriptorSet_ = {}, + uint32_t firstBinding_ = {}, + uint32_t bindingCount_ = {}, + SpirvResourceTypeFlagsEXT resourceMask_ = {}, + DescriptorMappingSourceEXT source_ = DescriptorMappingSourceEXT::eHeapWithConstantOffset, + 
DescriptorMappingSourceDataEXT sourceData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSet{ descriptorSet_ } + , firstBinding{ firstBinding_ } + , bindingCount{ bindingCount_ } + , resourceMask{ resourceMask_ } + , source{ source_ } + , sourceData{ sourceData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT( DescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAndBindingMappingEXT( VkDescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAndBindingMappingEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetAndBindingMappingEXT & operator=( DescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetAndBindingMappingEXT & operator=( VkDescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setDescriptorSet( uint32_t descriptorSet_ ) & VULKAN_HPP_NOEXCEPT + { + descriptorSet = descriptorSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setDescriptorSet( uint32_t descriptorSet_ ) && VULKAN_HPP_NOEXCEPT + { + descriptorSet = descriptorSet_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setFirstBinding( uint32_t firstBinding_ ) & VULKAN_HPP_NOEXCEPT + { + firstBinding = firstBinding_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setFirstBinding( uint32_t firstBinding_ ) && VULKAN_HPP_NOEXCEPT + { + firstBinding = firstBinding_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setBindingCount( uint32_t bindingCount_ ) & VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setBindingCount( uint32_t bindingCount_ ) && VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setResourceMask( SpirvResourceTypeFlagsEXT resourceMask_ ) & VULKAN_HPP_NOEXCEPT + { + resourceMask = resourceMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setResourceMask( SpirvResourceTypeFlagsEXT resourceMask_ ) && VULKAN_HPP_NOEXCEPT + { + resourceMask = resourceMask_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setSource( DescriptorMappingSourceEXT source_ ) & VULKAN_HPP_NOEXCEPT + { + source = source_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setSource( DescriptorMappingSourceEXT source_ ) && VULKAN_HPP_NOEXCEPT + { + source = source_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setSourceData( DescriptorMappingSourceDataEXT const & sourceData_ ) & VULKAN_HPP_NOEXCEPT + { + sourceData = sourceData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setSourceData( DescriptorMappingSourceDataEXT const & sourceData_ ) && VULKAN_HPP_NOEXCEPT + { + sourceData = sourceData_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetAndBindingMappingEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkDescriptorSetAndBindingMappingEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetAndBindingMappingEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetAndBindingMappingEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSet, firstBinding, bindingCount, resourceMask, source, sourceData ); + } +#endif + + public: + StructureType sType = StructureType::eDescriptorSetAndBindingMappingEXT; + const void * pNext = {}; + uint32_t descriptorSet = {}; + uint32_t firstBinding = {}; + uint32_t bindingCount = {}; + SpirvResourceTypeFlagsEXT resourceMask = {}; + DescriptorMappingSourceEXT source = DescriptorMappingSourceEXT::eHeapWithConstantOffset; + DescriptorMappingSourceDataEXT sourceData = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = DescriptorSetAndBindingMappingEXT; + }; +#endif + + template <> + struct CppType + { + using Type = DescriptorSetAndBindingMappingEXT; + }; + // wrapper struct for struct VkDescriptorSetBindingReferenceVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetBindingReferenceVALVE.html struct DescriptorSetBindingReferenceVALVE @@ -68882,6 +71549,263 @@ namespace VULKAN_HPP_NAMESPACE using Type = HeadlessSurfaceCreateInfoEXT; }; + // wrapper struct for struct VkHostAddressRangeConstEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostAddressRangeConstEXT.html + struct HostAddressRangeConstEXT + { + using NativeType = VkHostAddressRangeConstEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostAddressRangeConstEXT( const void * address_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : address{ address_ } + , size{ 
size_ } + { + } + + VULKAN_HPP_CONSTEXPR HostAddressRangeConstEXT( HostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostAddressRangeConstEXT( VkHostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HostAddressRangeConstEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HostAddressRangeConstEXT( ArrayProxyNoTemporaries const & address_ ) : address( address_.data() ), size( address_.size() * sizeof( T ) ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + HostAddressRangeConstEXT & operator=( HostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HostAddressRangeConstEXT & operator=( VkHostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT & setAddress( const void * address_ ) & VULKAN_HPP_NOEXCEPT + { + address = address_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT && setAddress( const void * address_ ) && VULKAN_HPP_NOEXCEPT + { + address = address_; + return std::move( *this ); + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HostAddressRangeConstEXT & setAddress( ArrayProxyNoTemporaries const & address_ ) VULKAN_HPP_NOEXCEPT + { + size = address_.size() * sizeof( T ); + address = address_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT & setSize( size_t size_ ) & VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT && setSize( size_t size_ ) && VULKAN_HPP_NOEXCEPT + { + size = size_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHostAddressRangeConstEXT const &() const VULKAN_HPP_NOEXCEPT + { 
+ return *reinterpret_cast( this ); + } + + operator VkHostAddressRangeConstEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostAddressRangeConstEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHostAddressRangeConstEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( address, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostAddressRangeConstEXT const & ) const = default; +#else + bool operator==( HostAddressRangeConstEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( address == rhs.address ) && ( size == rhs.size ); +# endif + } + + bool operator!=( HostAddressRangeConstEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const void * address = {}; + size_t size = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = HostAddressRangeConstEXT; + }; +#endif + + // wrapper struct for struct VkHostAddressRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostAddressRangeEXT.html + struct HostAddressRangeEXT + { + using NativeType = VkHostAddressRangeEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostAddressRangeEXT( void * address_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : address{ address_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR HostAddressRangeEXT( HostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostAddressRangeEXT( VkHostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HostAddressRangeEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
+ template + HostAddressRangeEXT( ArrayProxyNoTemporaries const & address_ ) : address( address_.data() ), size( address_.size() * sizeof( T ) ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + HostAddressRangeEXT & operator=( HostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HostAddressRangeEXT & operator=( VkHostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT & setAddress( void * address_ ) & VULKAN_HPP_NOEXCEPT + { + address = address_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT && setAddress( void * address_ ) && VULKAN_HPP_NOEXCEPT + { + address = address_; + return std::move( *this ); + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HostAddressRangeEXT & setAddress( ArrayProxyNoTemporaries const & address_ ) VULKAN_HPP_NOEXCEPT + { + size = address_.size() * sizeof( T ); + address = address_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT & setSize( size_t size_ ) & VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT && setSize( size_t size_ ) && VULKAN_HPP_NOEXCEPT + { + size = size_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHostAddressRangeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostAddressRangeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostAddressRangeEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHostAddressRangeEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( address, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostAddressRangeEXT const & ) const = default; +#else + bool operator==( HostAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( address == rhs.address ) && ( size == rhs.size ); +# endif + } + + bool operator!=( HostAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + void * address = {}; + size_t size = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = HostAddressRangeEXT; + }; +#endif + // wrapper struct for struct VkHostImageCopyDevicePerformanceQuery, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyDevicePerformanceQuery.html struct HostImageCopyDevicePerformanceQuery @@ -70566,6 +73490,350 @@ namespace VULKAN_HPP_NAMESPACE }; #endif + // wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html + struct ImageViewCreateInfo + { + using NativeType = VkImageViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateFlags flags_ = {}, + Image image_ = {}, + ImageViewType viewType_ = ImageViewType::e1D, + Format format_ = Format::eUndefined, + ComponentMapping components_ = {}, + ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , image{ image_ } + , viewType{ viewType_ } + , format{ format_ } + , components{ components_ } + , 
subresourceRange{ subresourceRange_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( ImageViewCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFlags( ImageViewCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT + { + image = image_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( ImageViewType viewType_ ) & VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setViewType( ImageViewType viewType_ ) && VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( Format 
format_ ) & VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT + { + format = format_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( ComponentMapping const & components_ ) & VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setComponents( ComponentMapping const & components_ ) && VULKAN_HPP_NOEXCEPT + { + components = components_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCreateInfo const & ) const = default; +#else + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); 
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && + ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + ImageViewCreateFlags flags = {}; + Image image = {}; + ImageViewType viewType = ImageViewType::e1D; + Format format = Format::eUndefined; + ComponentMapping components = {}; + ImageSubresourceRange subresourceRange = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; +#endif + + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; + + // wrapper struct for struct VkImageDescriptorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDescriptorInfoEXT.html + struct ImageDescriptorInfoEXT + { + using NativeType = VkImageDescriptorInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDescriptorInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDescriptorInfoEXT( const ImageViewCreateInfo * pView_ = {}, + ImageLayout layout_ = ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pView{ pView_ } + , layout{ layout_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageDescriptorInfoEXT( ImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDescriptorInfoEXT( VkImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDescriptorInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageDescriptorInfoEXT & operator=( 
ImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDescriptorInfoEXT & operator=( VkImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setPView( const ImageViewCreateInfo * pView_ ) & VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setPView( const ImageViewCreateInfo * pView_ ) && VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setLayout( ImageLayout layout_ ) & VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setLayout( ImageLayout layout_ ) && VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageDescriptorInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDescriptorInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDescriptorInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageDescriptorInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pView, layout ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDescriptorInfoEXT const & ) const = default; +#else + bool operator==( ImageDescriptorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pView == rhs.pView ) && ( layout == rhs.layout ); +# endif + } + + bool operator!=( ImageDescriptorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eImageDescriptorInfoEXT; + const void * pNext = {}; + const ImageViewCreateInfo * pView = {}; + ImageLayout layout = ImageLayout::eUndefined; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = ImageDescriptorInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = ImageDescriptorInfoEXT; + }; + // wrapper struct for struct VkSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout.html struct SubresourceLayout { @@ -73103,212 +76371,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = ImageViewCaptureDescriptorDataInfoEXT; }; - // wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html - struct ImageViewCreateInfo - { - using NativeType = VkImageViewCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; - -#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateFlags flags_ = {}, - Image image_ = {}, - ImageViewType viewType_ = ImageViewType::e1D, - Format format_ = Format::eUndefined, - ComponentMapping components_ = {}, - ImageSubresourceRange subresourceRange_ = {}, - const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , flags{ flags_ } - , image{ image_ } - , viewType{ viewType_ } - , format{ format_ } - , components{ components_ } - , subresourceRange{ subresourceRange_ } - { - } - - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) - { - } - - ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - - ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( ImageViewCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFlags( ImageViewCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT - { - image = image_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( ImageViewType viewType_ ) & VULKAN_HPP_NOEXCEPT - { - viewType = viewType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setViewType( ImageViewType 
viewType_ ) && VULKAN_HPP_NOEXCEPT - { - viewType = viewType_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT - { - format = format_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( ComponentMapping const & components_ ) & VULKAN_HPP_NOEXCEPT - { - components = components_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setComponents( ComponentMapping const & components_ ) && VULKAN_HPP_NOEXCEPT - { - components = components_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT - { - subresourceRange = subresourceRange_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT - { - subresourceRange = subresourceRange_; - return std::move( *this ); - } -#endif /*VULKAN_HPP_NO_SETTERS*/ - - operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - - operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) - std::tuple - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewCreateInfo const & ) const = default; -#else - 
bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && - ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); -# endif - } - - bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - StructureType sType = StructureType::eImageViewCreateInfo; - const void * pNext = {}; - ImageViewCreateFlags flags = {}; - Image image = {}; - ImageViewType viewType = ImageViewType::e1D; - Format format = Format::eUndefined; - ComponentMapping components = {}; - ImageSubresourceRange subresourceRange = {}; - }; - -#if 20 <= VULKAN_HPP_CPP_VERSION - template <> - struct CppType - { - using Type = ImageViewCreateInfo; - }; -#endif - - template <> - struct CppType - { - using Type = ImageViewCreateInfo; - }; - // wrapper struct for struct VkImageViewHandleInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewHandleInfoNVX.html struct ImageViewHandleInfoNVX { @@ -78461,6 +81523,144 @@ namespace VULKAN_HPP_NAMESPACE using Type = IndirectCommandsLayoutCreateInfoNV; }; + // wrapper struct for struct VkIndirectCommandsLayoutPushDataTokenNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutPushDataTokenNV.html + struct IndirectCommandsLayoutPushDataTokenNV + { + using NativeType = VkIndirectCommandsLayoutPushDataTokenNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutPushDataTokenNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + 
IndirectCommandsLayoutPushDataTokenNV( uint32_t pushDataOffset_ = {}, uint32_t pushDataSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pushDataOffset{ pushDataOffset_ } + , pushDataSize{ pushDataSize_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutPushDataTokenNV( IndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutPushDataTokenNV( VkIndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutPushDataTokenNV( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsLayoutPushDataTokenNV & operator=( IndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutPushDataTokenNV & operator=( VkIndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPushDataOffset( uint32_t pushDataOffset_ ) & VULKAN_HPP_NOEXCEPT + { + pushDataOffset = pushDataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPushDataOffset( uint32_t pushDataOffset_ ) && VULKAN_HPP_NOEXCEPT + { + pushDataOffset = pushDataOffset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPushDataSize( uint32_t pushDataSize_ ) & VULKAN_HPP_NOEXCEPT + { + pushDataSize = pushDataSize_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPushDataSize( uint32_t pushDataSize_ ) && VULKAN_HPP_NOEXCEPT + { + pushDataSize = pushDataSize_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutPushDataTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutPushDataTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutPushDataTokenNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutPushDataTokenNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pushDataOffset, pushDataSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutPushDataTokenNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutPushDataTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pushDataOffset == rhs.pushDataOffset ) && ( pushDataSize == rhs.pushDataSize ); +# endif + } + + bool operator!=( IndirectCommandsLayoutPushDataTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eIndirectCommandsLayoutPushDataTokenNV; + const void * pNext = {}; + uint32_t pushDataOffset = {}; + uint32_t pushDataSize = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = IndirectCommandsLayoutPushDataTokenNV; + }; +#endif + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutPushDataTokenNV; + }; + // wrapper struct for struct 
VkIndirectExecutionSetPipelineInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetPipelineInfoEXT.html struct IndirectExecutionSetPipelineInfoEXT @@ -86968,6 +90168,129 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_OHOS*/ + // wrapper struct for struct VkOpaqueCaptureDataCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDataCreateInfoEXT.html + struct OpaqueCaptureDataCreateInfoEXT + { + using NativeType = VkOpaqueCaptureDataCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpaqueCaptureDataCreateInfoEXT( const HostAddressRangeConstEXT * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR OpaqueCaptureDataCreateInfoEXT( OpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpaqueCaptureDataCreateInfoEXT( VkOpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : OpaqueCaptureDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + OpaqueCaptureDataCreateInfoEXT & operator=( OpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpaqueCaptureDataCreateInfoEXT & operator=( VkOpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT && setPNext( const void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT & setPData( const HostAddressRangeConstEXT * pData_ ) & VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT && setPData( const HostAddressRangeConstEXT * pData_ ) && VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpaqueCaptureDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpaqueCaptureDataCreateInfoEXT const & ) const = default; +#else + bool operator==( OpaqueCaptureDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( OpaqueCaptureDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eOpaqueCaptureDataCreateInfoEXT; + const void * pNext = {}; + const HostAddressRangeConstEXT * pData = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = OpaqueCaptureDataCreateInfoEXT; + }; +#endif + + template <> + struct CppType + { + using 
Type = OpaqueCaptureDataCreateInfoEXT; + }; + // wrapper struct for struct VkOpaqueCaptureDescriptorDataCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html struct OpaqueCaptureDescriptorDataCreateInfoEXT @@ -95172,6 +98495,134 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; }; + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM.html + struct PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( Bool32 cooperativeMatrixConversion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixConversion{ cooperativeMatrixConversion_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & + operator=( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & 
operator=( VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & setCooperativeMatrixConversion( Bool32 cooperativeMatrixConversion_ ) & + VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversion = cooperativeMatrixConversion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM && setCooperativeMatrixConversion( Bool32 cooperativeMatrixConversion_ ) && + VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversion = cooperativeMatrixConversion_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixConversion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixConversion == rhs.cooperativeMatrixConversion ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + void * pNext = {}; + Bool32 cooperativeMatrixConversion = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + }; +#endif + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; + }; + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesKHR.html struct PhysicalDeviceCooperativeMatrixFeaturesKHR @@ -99578,6 +103029,458 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM; }; + // wrapper struct for struct VkPhysicalDeviceDescriptorHeapFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapFeaturesEXT.html + struct PhysicalDeviceDescriptorHeapFeaturesEXT + { + using NativeType = VkPhysicalDeviceDescriptorHeapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapFeaturesEXT( Bool32 descriptorHeap_ = {}, + Bool32 descriptorHeapCaptureReplay_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorHeap{ descriptorHeap_ } + , descriptorHeapCaptureReplay{ descriptorHeapCaptureReplay_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapFeaturesEXT( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorHeapFeaturesEXT( VkPhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorHeapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorHeapFeaturesEXT & operator=( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorHeapFeaturesEXT & operator=( VkPhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setDescriptorHeap( Bool32 descriptorHeap_ ) & VULKAN_HPP_NOEXCEPT + { + descriptorHeap = descriptorHeap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setDescriptorHeap( Bool32 descriptorHeap_ ) && VULKAN_HPP_NOEXCEPT + { + descriptorHeap = descriptorHeap_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setDescriptorHeapCaptureReplay( Bool32 
descriptorHeapCaptureReplay_ ) & + VULKAN_HPP_NOEXCEPT + { + descriptorHeapCaptureReplay = descriptorHeapCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setDescriptorHeapCaptureReplay( Bool32 descriptorHeapCaptureReplay_ ) && + VULKAN_HPP_NOEXCEPT + { + descriptorHeapCaptureReplay = descriptorHeapCaptureReplay_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorHeapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorHeapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorHeapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorHeapFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorHeap, descriptorHeapCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorHeapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorHeap == rhs.descriptorHeap ) && + ( descriptorHeapCaptureReplay == rhs.descriptorHeapCaptureReplay ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT; + void * pNext = {}; + Bool32 descriptorHeap = {}; + Bool32 descriptorHeapCaptureReplay = 
+    {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  // NOTE(review): the sanitizer stripped every template-argument list in this patch; the CppType
+  // specializations below are restored per the CppType<EnumType, value> convention used throughout
+  // vulkan.hpp -- verify the C++20-guarded variant against the upstream generator output.
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_FEATURES_EXT>
+  {
+    using Type = PhysicalDeviceDescriptorHeapFeaturesEXT;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDescriptorHeapFeaturesEXT;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceDescriptorHeapPropertiesEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapPropertiesEXT.html
+  struct PhysicalDeviceDescriptorHeapPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceDescriptorHeapPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapPropertiesEXT( DeviceSize samplerHeapAlignment_ = {},
+                                                                    DeviceSize resourceHeapAlignment_ = {},
+                                                                    DeviceSize maxSamplerHeapSize_ = {},
+                                                                    DeviceSize maxResourceHeapSize_ = {},
+                                                                    DeviceSize minSamplerHeapReservedRange_ = {},
+                                                                    DeviceSize minSamplerHeapReservedRangeWithEmbedded_ = {},
+                                                                    DeviceSize minResourceHeapReservedRange_ = {},
+                                                                    DeviceSize samplerDescriptorSize_ = {},
+                                                                    DeviceSize imageDescriptorSize_ = {},
+                                                                    DeviceSize bufferDescriptorSize_ = {},
+                                                                    DeviceSize samplerDescriptorAlignment_ = {},
+                                                                    DeviceSize imageDescriptorAlignment_ = {},
+                                                                    DeviceSize bufferDescriptorAlignment_ = {},
+                                                                    DeviceSize maxPushDataSize_ = {},
+                                                                    size_t imageCaptureReplayOpaqueDataSize_ = {},
+                                                                    uint32_t maxDescriptorHeapEmbeddedSamplers_ = {},
+                                                                    uint32_t samplerYcbcrConversionCount_ = {},
+                                                                    Bool32 sparseDescriptorHeaps_ = {},
+                                                                    Bool32 protectedDescriptorHeaps_ = {},
+                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , samplerHeapAlignment{ samplerHeapAlignment_ }
+      , resourceHeapAlignment{ resourceHeapAlignment_ }
+      , maxSamplerHeapSize{ maxSamplerHeapSize_ }
+      , maxResourceHeapSize{ maxResourceHeapSize_ }
+      , minSamplerHeapReservedRange{ minSamplerHeapReservedRange_ }
+      , minSamplerHeapReservedRangeWithEmbedded{ minSamplerHeapReservedRangeWithEmbedded_ }
+      , minResourceHeapReservedRange{ minResourceHeapReservedRange_ }
+      , samplerDescriptorSize{ samplerDescriptorSize_ }
+      , imageDescriptorSize{ imageDescriptorSize_ }
+      , bufferDescriptorSize{ bufferDescriptorSize_ }
+      , samplerDescriptorAlignment{ samplerDescriptorAlignment_ }
+      , imageDescriptorAlignment{ imageDescriptorAlignment_ }
+      , bufferDescriptorAlignment{ bufferDescriptorAlignment_ }
+      , maxPushDataSize{ maxPushDataSize_ }
+      , imageCaptureReplayOpaqueDataSize{ imageCaptureReplayOpaqueDataSize_ }
+      , maxDescriptorHeapEmbeddedSamplers{ maxDescriptorHeapEmbeddedSamplers_ }
+      , samplerYcbcrConversionCount{ samplerYcbcrConversionCount_ }
+      , sparseDescriptorHeaps{ sparseDescriptorHeaps_ }
+      , protectedDescriptorHeaps{ protectedDescriptorHeaps_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapPropertiesEXT( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorHeapPropertiesEXT( VkPhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorHeapPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorHeapPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceDescriptorHeapPropertiesEXT & operator=( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorHeapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceDescriptorHeapPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorHeapPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorHeapPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceDescriptorHeapPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceDescriptorHeapPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &,
+               void * const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               DeviceSize const &,
+               size_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               Bool32 const &,
+               Bool32 const &>
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       samplerHeapAlignment,
+                       resourceHeapAlignment,
+                       maxSamplerHeapSize,
+                       maxResourceHeapSize,
+                       minSamplerHeapReservedRange,
+                       minSamplerHeapReservedRangeWithEmbedded,
+                       minResourceHeapReservedRange,
+                       samplerDescriptorSize,
+                       imageDescriptorSize,
+                       bufferDescriptorSize,
+                       samplerDescriptorAlignment,
+                       imageDescriptorAlignment,
+                       bufferDescriptorAlignment,
+                       maxPushDataSize,
+                       imageCaptureReplayOpaqueDataSize,
+                       maxDescriptorHeapEmbeddedSamplers,
+                       samplerYcbcrConversionCount,
+                       sparseDescriptorHeaps,
+                       protectedDescriptorHeaps );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDescriptorHeapPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerHeapAlignment == rhs.samplerHeapAlignment ) &&
+             ( resourceHeapAlignment == rhs.resourceHeapAlignment ) && ( maxSamplerHeapSize == rhs.maxSamplerHeapSize ) &&
+             ( maxResourceHeapSize == rhs.maxResourceHeapSize ) && ( minSamplerHeapReservedRange == rhs.minSamplerHeapReservedRange ) &&
+             ( minSamplerHeapReservedRangeWithEmbedded == rhs.minSamplerHeapReservedRangeWithEmbedded ) &&
+             ( minResourceHeapReservedRange == rhs.minResourceHeapReservedRange ) && ( samplerDescriptorSize == rhs.samplerDescriptorSize ) &&
+             ( imageDescriptorSize == rhs.imageDescriptorSize ) && ( bufferDescriptorSize == rhs.bufferDescriptorSize ) &&
+             ( samplerDescriptorAlignment == rhs.samplerDescriptorAlignment ) && ( imageDescriptorAlignment == rhs.imageDescriptorAlignment ) &&
+             ( bufferDescriptorAlignment == rhs.bufferDescriptorAlignment ) && ( maxPushDataSize == rhs.maxPushDataSize ) &&
+             ( imageCaptureReplayOpaqueDataSize == rhs.imageCaptureReplayOpaqueDataSize ) &&
+             ( maxDescriptorHeapEmbeddedSamplers == rhs.maxDescriptorHeapEmbeddedSamplers ) &&
+             ( samplerYcbcrConversionCount == rhs.samplerYcbcrConversionCount ) && ( sparseDescriptorHeaps == rhs.sparseDescriptorHeaps ) &&
+             ( protectedDescriptorHeaps == rhs.protectedDescriptorHeaps );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT;
+    void * pNext = {};
+    DeviceSize samplerHeapAlignment = {};
+    DeviceSize resourceHeapAlignment = {};
+    DeviceSize maxSamplerHeapSize = {};
+    DeviceSize maxResourceHeapSize = {};
+    DeviceSize minSamplerHeapReservedRange = {};
+    DeviceSize minSamplerHeapReservedRangeWithEmbedded = {};
+    DeviceSize minResourceHeapReservedRange = {};
+    DeviceSize samplerDescriptorSize = {};
+    DeviceSize imageDescriptorSize = {};
+    DeviceSize bufferDescriptorSize = {};
+    DeviceSize samplerDescriptorAlignment = {};
+    DeviceSize imageDescriptorAlignment = {};
+    DeviceSize bufferDescriptorAlignment = {};
+    DeviceSize maxPushDataSize = {};
+    size_t imageCaptureReplayOpaqueDataSize = {};
+    uint32_t maxDescriptorHeapEmbeddedSamplers = {};
+    uint32_t samplerYcbcrConversionCount = {};
+    Bool32 sparseDescriptorHeaps = {};
+    Bool32 protectedDescriptorHeaps = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_PROPERTIES_EXT>
+  {
+    using Type = PhysicalDeviceDescriptorHeapPropertiesEXT;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT>
+  {
+    using Type = PhysicalDeviceDescriptorHeapPropertiesEXT;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceDescriptorHeapTensorPropertiesARM, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapTensorPropertiesARM.html
+  struct PhysicalDeviceDescriptorHeapTensorPropertiesARM
+  {
+    using NativeType = VkPhysicalDeviceDescriptorHeapTensorPropertiesARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapTensorPropertiesARM( DeviceSize tensorDescriptorSize_ = {},
+                                                                          DeviceSize tensorDescriptorAlignment_ = {},
+                                                                          size_t tensorCaptureReplayOpaqueDataSize_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , tensorDescriptorSize{ tensorDescriptorSize_ }
+      , tensorDescriptorAlignment{ tensorDescriptorAlignment_ }
+      , tensorCaptureReplayOpaqueDataSize{ tensorCaptureReplayOpaqueDataSize_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceDescriptorHeapTensorPropertiesARM( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorHeapTensorPropertiesARM( VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDescriptorHeapTensorPropertiesARM( *reinterpret_cast<PhysicalDeviceDescriptorHeapTensorPropertiesARM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceDescriptorHeapTensorPropertiesARM & operator=( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceDescriptorHeapTensorPropertiesARM & operator=( VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceDescriptorHeapTensorPropertiesARM const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, void * const &, DeviceSize const &, DeviceSize const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, tensorDescriptorSize, tensorDescriptorAlignment, tensorCaptureReplayOpaqueDataSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorDescriptorSize == rhs.tensorDescriptorSize ) &&
+             ( tensorDescriptorAlignment == rhs.tensorDescriptorAlignment ) && ( tensorCaptureReplayOpaqueDataSize == rhs.tensorCaptureReplayOpaqueDataSize );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM;
+    void * pNext = {};
+    DeviceSize tensorDescriptorSize = {};
+    DeviceSize tensorDescriptorAlignment = {};
+    size_t tensorCaptureReplayOpaqueDataSize = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_HEAP_TENSOR_PROPERTIES_ARM>
+  {
+    using Type = PhysicalDeviceDescriptorHeapTensorPropertiesARM;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM>
+  {
+    using Type = PhysicalDeviceDescriptorHeapTensorPropertiesARM;
+  };
+
   // wrapper struct for struct VkPhysicalDeviceDescriptorIndexingFeatures, see
   //
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html
   struct PhysicalDeviceDescriptorIndexingFeatures
@@ -112149,6 +116052,134 @@ namespace VULKAN_HPP_NAMESPACE
   using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
 
+  // wrapper struct for struct VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR.html
+  struct PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( Bool32 internallySynchronizedQueues_ = {},
+                                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , internallySynchronizedQueues{ internallySynchronizedQueues_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( *reinterpret_cast<PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR &
+      operator=( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & operator=( VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & setInternallySynchronizedQueues( Bool32 internallySynchronizedQueues_ ) &
+      VULKAN_HPP_NOEXCEPT
+    {
+      internallySynchronizedQueues = internallySynchronizedQueues_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR && setInternallySynchronizedQueues( Bool32 internallySynchronizedQueues_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      internallySynchronizedQueues = internallySynchronizedQueues_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, internallySynchronizedQueues );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( internallySynchronizedQueues == rhs.internallySynchronizedQueues );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR;
+    void * pNext = {};
+    Bool32 internallySynchronizedQueues = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  // NOTE(review): template-argument lists were stripped by the sanitizer; restored per the
+  // CppType<EnumType, value> convention -- verify the C++20-guarded variant against upstream.
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INTERNALLY_SYNCHRONIZED_QUEUES_FEATURES_KHR>
+  {
+    using Type = PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR>
+  {
+    using Type = PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR;
+  };
+
   // wrapper struct for struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, see
   // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html
   struct PhysicalDeviceInvocationMaskFeaturesHUAWEI
@@ -124873,6 +128904,239 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
   };
 
+  // wrapper struct for struct VkPhysicalDevicePushConstantBankFeaturesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushConstantBankFeaturesNV.html
+  struct PhysicalDevicePushConstantBankFeaturesNV
+  {
+    using NativeType = VkPhysicalDevicePushConstantBankFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushConstantBankFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankFeaturesNV( Bool32 pushConstantBank_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pushConstantBank{ pushConstantBank_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankFeaturesNV( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushConstantBankFeaturesNV( VkPhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushConstantBankFeaturesNV( *reinterpret_cast<PhysicalDevicePushConstantBankFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePushConstantBankFeaturesNV & operator=( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDevicePushConstantBankFeaturesNV & operator=( VkPhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDevicePushConstantBankFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV & setPushConstantBank( Bool32 pushConstantBank_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantBank = pushConstantBank_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV && setPushConstantBank( Bool32 pushConstantBank_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantBank = pushConstantBank_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDevicePushConstantBankFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushConstantBankFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushConstantBankFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDevicePushConstantBankFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankFeaturesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDevicePushConstantBankFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pushConstantBank );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePushConstantBankFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pushConstantBank == rhs.pushConstantBank );
+#  endif
+    }
+
+    bool operator!=( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDevicePushConstantBankFeaturesNV;
+    void * pNext = {};
+    Bool32 pushConstantBank = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_FEATURES_NV>
+  {
+    using Type = PhysicalDevicePushConstantBankFeaturesNV;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushConstantBankFeaturesNV>
+  {
+    using Type = PhysicalDevicePushConstantBankFeaturesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDevicePushConstantBankPropertiesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushConstantBankPropertiesNV.html
+  struct PhysicalDevicePushConstantBankPropertiesNV
+  {
+    using NativeType = VkPhysicalDevicePushConstantBankPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushConstantBankPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankPropertiesNV( uint32_t maxGraphicsPushConstantBanks_ = {},
+                                                                     uint32_t maxComputePushConstantBanks_ = {},
+                                                                     uint32_t maxGraphicsPushDataBanks_ = {},
+                                                                     uint32_t maxComputePushDataBanks_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maxGraphicsPushConstantBanks{ maxGraphicsPushConstantBanks_ }
+      , maxComputePushConstantBanks{ maxComputePushConstantBanks_ }
+      , maxGraphicsPushDataBanks{ maxGraphicsPushDataBanks_ }
+      , maxComputePushDataBanks{ maxComputePushDataBanks_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankPropertiesNV( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushConstantBankPropertiesNV( VkPhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushConstantBankPropertiesNV( *reinterpret_cast<PhysicalDevicePushConstantBankPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePushConstantBankPropertiesNV & operator=( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDevicePushConstantBankPropertiesNV & operator=( VkPhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDevicePushConstantBankPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushConstantBankPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushConstantBankPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushConstantBankPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDevicePushConstantBankPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePushConstantBankPropertiesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDevicePushConstantBankPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxGraphicsPushConstantBanks, maxComputePushConstantBanks, maxGraphicsPushDataBanks, maxComputePushDataBanks );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePushConstantBankPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsPushConstantBanks == rhs.maxGraphicsPushConstantBanks ) &&
+             ( maxComputePushConstantBanks == rhs.maxComputePushConstantBanks ) && ( maxGraphicsPushDataBanks == rhs.maxGraphicsPushDataBanks ) &&
+             ( maxComputePushDataBanks == rhs.maxComputePushDataBanks );
+#  endif
+    }
+
+    bool operator!=( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDevicePushConstantBankPropertiesNV;
+    void * pNext = {};
+    uint32_t maxGraphicsPushConstantBanks = {};
+    uint32_t maxComputePushConstantBanks = {};
+    uint32_t maxGraphicsPushDataBanks = {};
+    uint32_t maxComputePushDataBanks = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_CONSTANT_BANK_PROPERTIES_NV>
+  {
+    using Type = PhysicalDevicePushConstantBankPropertiesNV;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushConstantBankPropertiesNV>
+  {
+    using Type = PhysicalDevicePushConstantBankPropertiesNV;
+  };
+
   // wrapper struct for struct VkPhysicalDevicePushDescriptorProperties, see
   // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushDescriptorProperties.html
   struct PhysicalDevicePushDescriptorProperties
@@ -134569,6 +138833,134 @@ namespace VULKAN_HPP_NAMESPACE
   using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
 
+  // wrapper struct for struct
VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT.html
+  struct PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupPartitionedFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT( Bool32 shaderSubgroupPartitioned_ = {},
+                                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderSubgroupPartitioned{ shaderSubgroupPartitioned_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT( PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT( VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT &
+      operator=( PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT & operator=( VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT & setShaderSubgroupPartitioned( Bool32 shaderSubgroupPartitioned_ ) &
+      VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupPartitioned = shaderSubgroupPartitioned_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT && setShaderSubgroupPartitioned( Bool32 shaderSubgroupPartitioned_ ) &&
+      VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupPartitioned = shaderSubgroupPartitioned_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderSubgroupPartitionedFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupPartitioned );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupPartitioned == rhs.shaderSubgroupPartitioned );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupPartitionedFeaturesEXT;
+    void * pNext = {};
+    Bool32 shaderSubgroupPartitioned = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  // NOTE(review): template-argument lists were stripped by the sanitizer; restored per the
+  // CppType<EnumType, value> convention -- verify the C++20-guarded variant against upstream.
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_PARTITIONED_FEATURES_EXT>
+  {
+    using Type = PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupPartitionedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT;
+  };
+
   // wrapper struct for struct VkPhysicalDeviceShaderSubgroupRotateFeatures, see
   // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupRotateFeatures.html
   struct PhysicalDeviceShaderSubgroupRotateFeatures
@@ -157200,6 +161592,128 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = ProtectedSubmitInfo;
   };
 
+  // wrapper struct for struct VkPushConstantBankInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantBankInfoNV.html
+  struct PushConstantBankInfoNV
+  {
+    using NativeType = VkPushConstantBankInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantBankInfoNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushConstantBankInfoNV( uint32_t bank_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , bank{ bank_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PushConstantBankInfoNV( PushConstantBankInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantBankInfoNV( VkPushConstantBankInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PushConstantBankInfoNV( *reinterpret_cast<PushConstantBankInfoNV const *>( &rhs ) )
+    {
+    }
+
+    PushConstantBankInfoNV & operator=( PushConstantBankInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PushConstantBankInfoNV & operator=( VkPushConstantBankInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PushConstantBankInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PushConstantBankInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantBankInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return std::move( *this );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantBankInfoNV & setBank( uint32_t bank_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      bank = bank_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantBankInfoNV && setBank( uint32_t bank_ ) && VULKAN_HPP_NOEXCEPT
+    {
+      bank = bank_;
+      return std::move( *this );
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPushConstantBankInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantBankInfoNV *>( this );
+    }
+
+    operator VkPushConstantBankInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantBankInfoNV *>( this );
+    }
+
+    operator VkPushConstantBankInfoNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPushConstantBankInfoNV *>( this );
+    }
+
+    operator VkPushConstantBankInfoNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPushConstantBankInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bank );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PushConstantBankInfoNV const & ) const = default;
+#else
+    bool operator==( PushConstantBankInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bank == rhs.bank );
+#  endif
+    }
+
+    bool operator!=( PushConstantBankInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    StructureType sType = StructureType::ePushConstantBankInfoNV;
+    const void * pNext = {};
+    uint32_t bank = {};
+  };
+
+#if 20 <= VULKAN_HPP_CPP_VERSION
+  template <>
+  struct CppType<VkStructureType, VK_STRUCTURE_TYPE_PUSH_CONSTANT_BANK_INFO_NV>
+  {
+    using Type = PushConstantBankInfoNV;
+  };
+#endif
+
+  template <>
+  struct CppType<StructureType, StructureType::ePushConstantBankInfoNV>
+  {
+    using Type = PushConstantBankInfoNV;
+  };
+
   // wrapper struct for struct VkPushConstantsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantsInfo.html
   struct PushConstantsInfo
   {
@@ -157414,6 +161928,139 @@ namespace VULKAN_HPP_NAMESPACE
   using PushConstantsInfoKHR = PushConstantsInfo;
 
+  // wrapper struct for struct VkPushDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDataInfoEXT.html
+  struct PushDataInfoEXT
+  {
+    using NativeType = VkPushDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDataInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushDataInfoEXT( uint32_t offset_ = {}, HostAddressRangeConstEXT data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , offset{ offset_ }
+      , data{ data_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PushDataInfoEXT( PushDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushDataInfoEXT( VkPushDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PushDataInfoEXT( *reinterpret_cast<PushDataInfoEXT const *>( &rhs ) ) {}
+
+    PushDataInfoEXT & operator=( PushDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PushDataInfoEXT & operator=( VkPushDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PushDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT & setData( HostAddressRangeConstEXT const & data_ ) & VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDataInfoEXT && setData( HostAddressRangeConstEXT const & data_ ) && VULKAN_HPP_NOEXCEPT + { + data = data_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPushDataInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, offset, data ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushDataInfoEXT const & ) const = default; +#else + bool operator==( PushDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( offset == rhs.offset ) && ( data == rhs.data ); +# endif + } + + bool operator!=( PushDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::ePushDataInfoEXT; + 
const void * pNext = {}; + uint32_t offset = {}; + HostAddressRangeConstEXT data = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = PushDataInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = PushDataInfoEXT; + }; + // wrapper struct for struct VkWriteDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSet.html struct WriteDescriptorSet { @@ -168308,6 +172955,513 @@ namespace VULKAN_HPP_NAMESPACE using Type = ResolveImageModeInfoKHR; }; + // wrapper struct for struct VkTexelBufferDescriptorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTexelBufferDescriptorInfoEXT.html + struct TexelBufferDescriptorInfoEXT + { + using NativeType = VkTexelBufferDescriptorInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTexelBufferDescriptorInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TexelBufferDescriptorInfoEXT( Format format_ = Format::eUndefined, + DeviceAddressRangeEXT addressRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , addressRange{ addressRange_ } + { + } + + VULKAN_HPP_CONSTEXPR TexelBufferDescriptorInfoEXT( TexelBufferDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TexelBufferDescriptorInfoEXT( VkTexelBufferDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : TexelBufferDescriptorInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + TexelBufferDescriptorInfoEXT & operator=( TexelBufferDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TexelBufferDescriptorInfoEXT & operator=( VkTexelBufferDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT + { + format = format_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT & setAddressRange( DeviceAddressRangeEXT const & addressRange_ ) & VULKAN_HPP_NOEXCEPT + { + addressRange = addressRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TexelBufferDescriptorInfoEXT && setAddressRange( DeviceAddressRangeEXT const & addressRange_ ) && VULKAN_HPP_NOEXCEPT + { + addressRange = addressRange_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTexelBufferDescriptorInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTexelBufferDescriptorInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTexelBufferDescriptorInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTexelBufferDescriptorInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, addressRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TexelBufferDescriptorInfoEXT const & ) const = default; +#else + bool operator==( TexelBufferDescriptorInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( addressRange == rhs.addressRange ); +# endif + } + + bool operator!=( TexelBufferDescriptorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eTexelBufferDescriptorInfoEXT; + const void * pNext = {}; + Format format = Format::eUndefined; + DeviceAddressRangeEXT addressRange = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = TexelBufferDescriptorInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = TexelBufferDescriptorInfoEXT; + }; + + // wrapper struct for struct VkTensorViewCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateInfoARM.html + struct TensorViewCreateInfoARM + { + using NativeType = VkTensorViewCreateInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorViewCreateInfoARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateFlagsARM flags_ = {}, + TensorARM tensor_ = {}, + Format format_ = Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , tensor{ tensor_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TensorViewCreateInfoARM( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : TensorViewCreateInfoARM( *reinterpret_cast( &rhs ) ) + { + } + + TensorViewCreateInfoARM & operator=( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TensorViewCreateInfoARM & operator=( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFlags( TensorViewCreateFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setFlags( TensorViewCreateFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setTensor( TensorARM tensor_ ) & VULKAN_HPP_NOEXCEPT + { + tensor = tensor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setTensor( TensorARM tensor_ ) && VULKAN_HPP_NOEXCEPT + { + tensor = tensor_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT + { + format = format_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTensorViewCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTensorViewCreateInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTensorViewCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTensorViewCreateInfoARM *() VULKAN_HPP_NOEXCEPT 
+ { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, tensor, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TensorViewCreateInfoARM const & ) const = default; +#else + bool operator==( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( tensor == rhs.tensor ) && ( format == rhs.format ); +# endif + } + + bool operator!=( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eTensorViewCreateInfoARM; + const void * pNext = {}; + TensorViewCreateFlagsARM flags = {}; + TensorARM tensor = {}; + Format format = Format::eUndefined; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = TensorViewCreateInfoARM; + }; +#endif + + template <> + struct CppType + { + using Type = TensorViewCreateInfoARM; + }; + + union ResourceDescriptorDataEXT + { + using NativeType = VkResourceDescriptorDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT( const ImageDescriptorInfoEXT * pImage_ = {} ) : pImage( pImage_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT( const TexelBufferDescriptorInfoEXT * pTexelBuffer_ ) : pTexelBuffer( pTexelBuffer_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT( const DeviceAddressRangeEXT * pAddressRange_ ) : pAddressRange( pAddressRange_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT( const TensorViewCreateInfoARM * pTensorARM_ ) : pTensorARM( pTensorARM_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT & setPImage( const ImageDescriptorInfoEXT * pImage_ ) & VULKAN_HPP_NOEXCEPT + { + pImage = pImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT && setPImage( const ImageDescriptorInfoEXT * pImage_ ) && VULKAN_HPP_NOEXCEPT + { + pImage = pImage_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT & setPTexelBuffer( const TexelBufferDescriptorInfoEXT * pTexelBuffer_ ) & VULKAN_HPP_NOEXCEPT + { + pTexelBuffer = pTexelBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT && setPTexelBuffer( const TexelBufferDescriptorInfoEXT * pTexelBuffer_ ) && VULKAN_HPP_NOEXCEPT + { + pTexelBuffer = pTexelBuffer_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT & setPAddressRange( const DeviceAddressRangeEXT * pAddressRange_ ) & VULKAN_HPP_NOEXCEPT + { + pAddressRange = pAddressRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT && setPAddressRange( const DeviceAddressRangeEXT * pAddressRange_ ) && VULKAN_HPP_NOEXCEPT + { + pAddressRange = pAddressRange_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT & setPTensorARM( const TensorViewCreateInfoARM * pTensorARM_ ) & VULKAN_HPP_NOEXCEPT + { + pTensorARM = pTensorARM_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorDataEXT && setPTensorARM( const TensorViewCreateInfoARM * pTensorARM_ ) && VULKAN_HPP_NOEXCEPT + { + pTensorARM = pTensorARM_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkResourceDescriptorDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkResourceDescriptorDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const ImageDescriptorInfoEXT * pImage; + const 
TexelBufferDescriptorInfoEXT * pTexelBuffer; + const DeviceAddressRangeEXT * pAddressRange; + const TensorViewCreateInfoARM * pTensorARM; +#else + const VkImageDescriptorInfoEXT * pImage; + const VkTexelBufferDescriptorInfoEXT * pTexelBuffer; + const VkDeviceAddressRangeEXT * pAddressRange; + const VkTensorViewCreateInfoARM * pTensorARM; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = ResourceDescriptorDataEXT; + }; +#endif + + // wrapper struct for struct VkResourceDescriptorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResourceDescriptorInfoEXT.html + struct ResourceDescriptorInfoEXT + { + using NativeType = VkResourceDescriptorInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResourceDescriptorInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT( DescriptorType type_ = DescriptorType::eSampler, + ResourceDescriptorDataEXT data_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT( ResourceDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResourceDescriptorInfoEXT( VkResourceDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ResourceDescriptorInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ResourceDescriptorInfoEXT & operator=( ResourceDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ResourceDescriptorInfoEXT & operator=( VkResourceDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
ResourceDescriptorInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT & setType( DescriptorType type_ ) & VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT && setType( DescriptorType type_ ) && VULKAN_HPP_NOEXCEPT + { + type = type_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT & setData( ResourceDescriptorDataEXT const & data_ ) & VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResourceDescriptorInfoEXT && setData( ResourceDescriptorDataEXT const & data_ ) && VULKAN_HPP_NOEXCEPT + { + data = data_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkResourceDescriptorInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResourceDescriptorInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResourceDescriptorInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkResourceDescriptorInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, data ); + } +#endif + + public: + StructureType sType = StructureType::eResourceDescriptorInfoEXT; + const void * pNext = {}; + DescriptorType type = DescriptorType::eSampler; + ResourceDescriptorDataEXT data = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = ResourceDescriptorInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = 
ResourceDescriptorInfoEXT; + }; + // wrapper struct for struct VkSamplerBlockMatchWindowCreateInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBlockMatchWindowCreateInfoQCOM.html struct SamplerBlockMatchWindowCreateInfoQCOM @@ -168711,390 +173865,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = SamplerCaptureDescriptorDataInfoEXT; }; - // wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html - struct SamplerCreateInfo - { - using NativeType = VkSamplerCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; - -#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateFlags flags_ = {}, - Filter magFilter_ = Filter::eNearest, - Filter minFilter_ = Filter::eNearest, - SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, - SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, - SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, - SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, - float mipLodBias_ = {}, - Bool32 anisotropyEnable_ = {}, - float maxAnisotropy_ = {}, - Bool32 compareEnable_ = {}, - CompareOp compareOp_ = CompareOp::eNever, - float minLod_ = {}, - float maxLod_ = {}, - BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, - Bool32 unnormalizedCoordinates_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , flags{ flags_ } - , magFilter{ magFilter_ } - , minFilter{ minFilter_ } - , mipmapMode{ mipmapMode_ } - , addressModeU{ addressModeU_ } - , addressModeV{ addressModeV_ } - , addressModeW{ addressModeW_ } - , mipLodBias{ mipLodBias_ } - , anisotropyEnable{ anisotropyEnable_ } - , maxAnisotropy{ maxAnisotropy_ } - , compareEnable{ compareEnable_ } - , compareOp{ 
compareOp_ } - , minLod{ minLod_ } - , maxLod{ maxLod_ } - , borderColor{ borderColor_ } - , unnormalizedCoordinates{ unnormalizedCoordinates_ } - { - } - - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) {} - - SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - - SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( SamplerCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setFlags( SamplerCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( Filter magFilter_ ) & VULKAN_HPP_NOEXCEPT - { - magFilter = magFilter_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMagFilter( Filter magFilter_ ) && VULKAN_HPP_NOEXCEPT - { - magFilter = magFilter_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( Filter minFilter_ ) & VULKAN_HPP_NOEXCEPT - { - minFilter = minFilter_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMinFilter( Filter minFilter_ ) && VULKAN_HPP_NOEXCEPT - { - minFilter = minFilter_; - return 
std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( SamplerMipmapMode mipmapMode_ ) & VULKAN_HPP_NOEXCEPT - { - mipmapMode = mipmapMode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMipmapMode( SamplerMipmapMode mipmapMode_ ) && VULKAN_HPP_NOEXCEPT - { - mipmapMode = mipmapMode_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( SamplerAddressMode addressModeU_ ) & VULKAN_HPP_NOEXCEPT - { - addressModeU = addressModeU_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeU( SamplerAddressMode addressModeU_ ) && VULKAN_HPP_NOEXCEPT - { - addressModeU = addressModeU_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( SamplerAddressMode addressModeV_ ) & VULKAN_HPP_NOEXCEPT - { - addressModeV = addressModeV_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeV( SamplerAddressMode addressModeV_ ) && VULKAN_HPP_NOEXCEPT - { - addressModeV = addressModeV_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( SamplerAddressMode addressModeW_ ) & VULKAN_HPP_NOEXCEPT - { - addressModeW = addressModeW_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeW( SamplerAddressMode addressModeW_ ) && VULKAN_HPP_NOEXCEPT - { - addressModeW = addressModeW_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) & VULKAN_HPP_NOEXCEPT - { - mipLodBias = mipLodBias_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMipLodBias( float mipLodBias_ ) && VULKAN_HPP_NOEXCEPT - { - mipLodBias = mipLodBias_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( Bool32 anisotropyEnable_ ) & VULKAN_HPP_NOEXCEPT - { - anisotropyEnable = anisotropyEnable_; - return *this; - 
} - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAnisotropyEnable( Bool32 anisotropyEnable_ ) && VULKAN_HPP_NOEXCEPT - { - anisotropyEnable = anisotropyEnable_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) & VULKAN_HPP_NOEXCEPT - { - maxAnisotropy = maxAnisotropy_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxAnisotropy( float maxAnisotropy_ ) && VULKAN_HPP_NOEXCEPT - { - maxAnisotropy = maxAnisotropy_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( Bool32 compareEnable_ ) & VULKAN_HPP_NOEXCEPT - { - compareEnable = compareEnable_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareEnable( Bool32 compareEnable_ ) && VULKAN_HPP_NOEXCEPT - { - compareEnable = compareEnable_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( CompareOp compareOp_ ) & VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareOp( CompareOp compareOp_ ) && VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) & VULKAN_HPP_NOEXCEPT - { - minLod = minLod_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMinLod( float minLod_ ) && VULKAN_HPP_NOEXCEPT - { - minLod = minLod_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) & VULKAN_HPP_NOEXCEPT - { - maxLod = maxLod_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxLod( float maxLod_ ) && VULKAN_HPP_NOEXCEPT - { - maxLod = maxLod_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( BorderColor borderColor_ ) & VULKAN_HPP_NOEXCEPT - { - borderColor = borderColor_; - return *this; - } - 
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setBorderColor( BorderColor borderColor_ ) && VULKAN_HPP_NOEXCEPT - { - borderColor = borderColor_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) & VULKAN_HPP_NOEXCEPT - { - unnormalizedCoordinates = unnormalizedCoordinates_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) && VULKAN_HPP_NOEXCEPT - { - unnormalizedCoordinates = unnormalizedCoordinates_; - return std::move( *this ); - } -#endif /*VULKAN_HPP_NO_SETTERS*/ - - operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) - std::tuple - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, - pNext, - flags, - magFilter, - minFilter, - mipmapMode, - addressModeU, - addressModeV, - addressModeW, - mipLodBias, - anisotropyEnable, - maxAnisotropy, - compareEnable, - compareOp, - minLod, - maxLod, - borderColor, - unnormalizedCoordinates ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerCreateInfo const & ) const = default; -#else - bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && - ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( 
addressModeV == rhs.addressModeV ) && - ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && - ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && - ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); -# endif - } - - bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - StructureType sType = StructureType::eSamplerCreateInfo; - const void * pNext = {}; - SamplerCreateFlags flags = {}; - Filter magFilter = Filter::eNearest; - Filter minFilter = Filter::eNearest; - SamplerMipmapMode mipmapMode = SamplerMipmapMode::eNearest; - SamplerAddressMode addressModeU = SamplerAddressMode::eRepeat; - SamplerAddressMode addressModeV = SamplerAddressMode::eRepeat; - SamplerAddressMode addressModeW = SamplerAddressMode::eRepeat; - float mipLodBias = {}; - Bool32 anisotropyEnable = {}; - float maxAnisotropy = {}; - Bool32 compareEnable = {}; - CompareOp compareOp = CompareOp::eNever; - float minLod = {}; - float maxLod = {}; - BorderColor borderColor = BorderColor::eFloatTransparentBlack; - Bool32 unnormalizedCoordinates = {}; - }; - -#if 20 <= VULKAN_HPP_CPP_VERSION - template <> - struct CppType - { - using Type = SamplerCreateInfo; - }; -#endif - - template <> - struct CppType - { - using Type = SamplerCreateInfo; - }; - // wrapper struct for struct VkSamplerCubicWeightsCreateInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCubicWeightsCreateInfoQCOM.html struct SamplerCubicWeightsCreateInfoQCOM @@ -169340,6 +174110,129 @@ namespace VULKAN_HPP_NAMESPACE using Type = SamplerCustomBorderColorCreateInfoEXT; }; + // wrapper struct for struct VkSamplerCustomBorderColorIndexCreateInfoEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCustomBorderColorIndexCreateInfoEXT.html + struct SamplerCustomBorderColorIndexCreateInfoEXT + { + using NativeType = VkSamplerCustomBorderColorIndexCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorIndexCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCustomBorderColorIndexCreateInfoEXT( uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerCustomBorderColorIndexCreateInfoEXT( SamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorIndexCreateInfoEXT( VkSamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCustomBorderColorIndexCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SamplerCustomBorderColorIndexCreateInfoEXT & operator=( SamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCustomBorderColorIndexCreateInfoEXT & operator=( VkSamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorIndexCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorIndexCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorIndexCreateInfoEXT & setIndex( uint32_t index_ ) & 
VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorIndexCreateInfoEXT && setIndex( uint32_t index_ ) && VULKAN_HPP_NOEXCEPT + { + index = index_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCustomBorderColorIndexCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorIndexCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorIndexCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorIndexCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCustomBorderColorIndexCreateInfoEXT const & ) const = default; +#else + bool operator==( SamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ); +# endif + } + + bool operator!=( SamplerCustomBorderColorIndexCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eSamplerCustomBorderColorIndexCreateInfoEXT; + const void * pNext = {}; + uint32_t index = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = SamplerCustomBorderColorIndexCreateInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = SamplerCustomBorderColorIndexCreateInfoEXT; + }; + // wrapper struct for struct 
VkSamplerReductionModeCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerReductionModeCreateInfo.html struct SamplerReductionModeCreateInfo @@ -172536,6 +177429,166 @@ namespace VULKAN_HPP_NAMESPACE using Type = ShaderCreateInfoEXT; }; + // wrapper struct for struct VkShaderDescriptorSetAndBindingMappingInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderDescriptorSetAndBindingMappingInfoEXT.html + struct ShaderDescriptorSetAndBindingMappingInfoEXT + { + using NativeType = VkShaderDescriptorSetAndBindingMappingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderDescriptorSetAndBindingMappingInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderDescriptorSetAndBindingMappingInfoEXT( uint32_t mappingCount_ = {}, + const DescriptorSetAndBindingMappingEXT * pMappings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mappingCount{ mappingCount_ } + , pMappings{ pMappings_ } + { + } + + VULKAN_HPP_CONSTEXPR ShaderDescriptorSetAndBindingMappingInfoEXT( ShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderDescriptorSetAndBindingMappingInfoEXT( VkShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderDescriptorSetAndBindingMappingInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderDescriptorSetAndBindingMappingInfoEXT( ArrayProxyNoTemporaries const & mappings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), mappingCount( static_cast( mappings_.size() ) ), pMappings( mappings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShaderDescriptorSetAndBindingMappingInfoEXT & operator=( ShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderDescriptorSetAndBindingMappingInfoEXT & operator=( VkShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT & setMappingCount( uint32_t mappingCount_ ) & VULKAN_HPP_NOEXCEPT + { + mappingCount = mappingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT && setMappingCount( uint32_t mappingCount_ ) && VULKAN_HPP_NOEXCEPT + { + mappingCount = mappingCount_; + return std::move( *this ); + } + + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT & setPMappings( const DescriptorSetAndBindingMappingEXT * pMappings_ ) & + VULKAN_HPP_NOEXCEPT + { + pMappings = pMappings_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderDescriptorSetAndBindingMappingInfoEXT && setPMappings( const DescriptorSetAndBindingMappingEXT * pMappings_ ) && + VULKAN_HPP_NOEXCEPT + { + pMappings = pMappings_; + return std::move( *this ); + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderDescriptorSetAndBindingMappingInfoEXT & + setMappings( ArrayProxyNoTemporaries const & mappings_ ) VULKAN_HPP_NOEXCEPT + { + mappingCount = static_cast( mappings_.size() ); + pMappings = mappings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShaderDescriptorSetAndBindingMappingInfoEXT const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderDescriptorSetAndBindingMappingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderDescriptorSetAndBindingMappingInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderDescriptorSetAndBindingMappingInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mappingCount, pMappings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderDescriptorSetAndBindingMappingInfoEXT const & ) const = default; +#else + bool operator==( ShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mappingCount == rhs.mappingCount ) && ( pMappings == rhs.pMappings ); +# endif + } + + bool operator!=( ShaderDescriptorSetAndBindingMappingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eShaderDescriptorSetAndBindingMappingInfoEXT; + const void * pNext = {}; + uint32_t mappingCount = {}; + const DescriptorSetAndBindingMappingEXT * pMappings = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = ShaderDescriptorSetAndBindingMappingInfoEXT; + }; +#endif + + template <> + struct CppType + { + using Type = ShaderDescriptorSetAndBindingMappingInfoEXT; + }; + // wrapper struct for struct VkShaderModuleCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleCreateInfo.html struct ShaderModuleCreateInfo { @@ -175163,6 +180216,131 @@ namespace VULKAN_HPP_NAMESPACE using 
SubresourceLayout2EXT = SubresourceLayout2; using SubresourceLayout2KHR = SubresourceLayout2; + // wrapper struct for struct VkSubsampledImageFormatPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubsampledImageFormatPropertiesEXT.html + struct SubsampledImageFormatPropertiesEXT + { + using NativeType = VkSubsampledImageFormatPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubsampledImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubsampledImageFormatPropertiesEXT( uint32_t subsampledImageDescriptorCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subsampledImageDescriptorCount{ subsampledImageDescriptorCount_ } + { + } + + VULKAN_HPP_CONSTEXPR SubsampledImageFormatPropertiesEXT( SubsampledImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubsampledImageFormatPropertiesEXT( VkSubsampledImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubsampledImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubsampledImageFormatPropertiesEXT & operator=( SubsampledImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubsampledImageFormatPropertiesEXT & operator=( VkSubsampledImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubsampledImageFormatPropertiesEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubsampledImageFormatPropertiesEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return std::move( *this ); + } 
+ + VULKAN_HPP_CONSTEXPR_14 SubsampledImageFormatPropertiesEXT & setSubsampledImageDescriptorCount( uint32_t subsampledImageDescriptorCount_ ) & + VULKAN_HPP_NOEXCEPT + { + subsampledImageDescriptorCount = subsampledImageDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubsampledImageFormatPropertiesEXT && setSubsampledImageDescriptorCount( uint32_t subsampledImageDescriptorCount_ ) && + VULKAN_HPP_NOEXCEPT + { + subsampledImageDescriptorCount = subsampledImageDescriptorCount_; + return std::move( *this ); + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubsampledImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubsampledImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubsampledImageFormatPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubsampledImageFormatPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) + std::tuple reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subsampledImageDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubsampledImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( SubsampledImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledImageDescriptorCount == rhs.subsampledImageDescriptorCount ); +# endif + } + + bool operator!=( SubsampledImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + StructureType sType = StructureType::eSubsampledImageFormatPropertiesEXT; + const void * pNext = {}; + uint32_t 
subsampledImageDescriptorCount = {}; + }; + +#if 20 <= VULKAN_HPP_CPP_VERSION + template <> + struct CppType + { + using Type = SubsampledImageFormatPropertiesEXT; + }; +#endif + + template <> + struct CppType + { + using Type = SubsampledImageFormatPropertiesEXT; + }; + // wrapper struct for struct VkSurfaceCapabilities2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2EXT.html struct SurfaceCapabilities2EXT { @@ -179879,160 +185057,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = TensorViewCaptureDescriptorDataInfoARM; }; - // wrapper struct for struct VkTensorViewCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateInfoARM.html - struct TensorViewCreateInfoARM - { - using NativeType = VkTensorViewCreateInfoARM; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorViewCreateInfoARM; - -#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateFlagsARM flags_ = {}, - TensorARM tensor_ = {}, - Format format_ = Format::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , flags{ flags_ } - , tensor{ tensor_ } - , format{ format_ } - { - } - - VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TensorViewCreateInfoARM( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT - : TensorViewCreateInfoARM( *reinterpret_cast( &rhs ) ) - { - } - - TensorViewCreateInfoARM & operator=( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - - TensorViewCreateInfoARM & operator=( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFlags( TensorViewCreateFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setFlags( TensorViewCreateFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setTensor( TensorARM tensor_ ) & VULKAN_HPP_NOEXCEPT - { - tensor = tensor_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setTensor( TensorARM tensor_ ) && VULKAN_HPP_NOEXCEPT - { - tensor = tensor_; - return std::move( *this ); - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT - { - format = format_; - return std::move( *this ); - } -#endif /*VULKAN_HPP_NO_SETTERS*/ - - operator VkTensorViewCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkTensorViewCreateInfoARM &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkTensorViewCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - - operator VkTensorViewCreateInfoARM *() VULKAN_HPP_NOEXCEPT - { - return reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) - std::tuple - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, flags, tensor, format ); - } -#endif - -#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TensorViewCreateInfoARM const & ) const = default; -#else - bool operator==( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( tensor == rhs.tensor ) && ( format == rhs.format ); -# endif - } - - bool operator!=( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - StructureType sType = StructureType::eTensorViewCreateInfoARM; - const void * pNext = {}; - TensorViewCreateFlagsARM flags = {}; - TensorARM tensor = {}; - Format format = Format::eUndefined; - }; - -#if 20 <= VULKAN_HPP_CPP_VERSION - template <> - struct CppType - { - using Type = TensorViewCreateInfoARM; - }; -#endif - - template <> - struct CppType - { - using Type = TensorViewCreateInfoARM; - }; - // wrapper struct for struct VkTextureLODGatherFormatPropertiesAMD, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTextureLODGatherFormatPropertiesAMD.html struct TextureLODGatherFormatPropertiesAMD diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 9f30668..8a73dad 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -140,12 +140,12 @@ namespace VULKAN_HPP_NAMESPACE result += " Disjoint |"; if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += " CornerSampledNV |"; + if ( value & ImageCreateFlagBits::eDescriptorHeapCaptureReplayEXT ) + result += " DescriptorHeapCaptureReplayEXT |"; if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += " SampleLocationsCompatibleDepthEXT |"; if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += " SubsampledEXT |"; - if ( value & ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) - result += 
" DescriptorBufferCaptureReplayEXT |"; if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) result += " MultisampledRenderToSingleSampledEXT |"; if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT ) @@ -346,6 +346,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result = "{"; if ( value & DeviceQueueCreateFlagBits::eProtected ) result += " Protected |"; + if ( value & DeviceQueueCreateFlagBits::eInternallySynchronizedKHR ) + result += " InternallySynchronizedKHR |"; if ( result.size() > 1 ) result.back() = '}'; @@ -607,6 +609,8 @@ namespace VULKAN_HPP_NAMESPACE if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX ) result += " ExecutionGraphScratchAMDX |"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eDescriptorHeapEXT ) + result += " DescriptorHeapEXT |"; if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) result += " AccelerationStructureBuildInputReadOnlyKHR |"; if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) @@ -1540,8 +1544,8 @@ namespace VULKAN_HPP_NAMESPACE result += " Rotate |"; if ( value & SubgroupFeatureFlagBits::eRotateClustered ) result += " RotateClustered |"; - if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) - result += " PartitionedNV |"; + if ( value & SubgroupFeatureFlagBits::ePartitionedEXT ) + result += " PartitionedEXT |"; if ( result.size() > 1 ) result.back() = '}'; @@ -1794,6 +1798,10 @@ namespace VULKAN_HPP_NAMESPACE result += " VideoDecodeReadKHR |"; if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) result += " VideoDecodeWriteKHR |"; + if ( value & AccessFlagBits2::eSamplerHeapReadEXT ) + result += " SamplerHeapReadEXT |"; + if ( value & AccessFlagBits2::eResourceHeapReadEXT ) + result += " ResourceHeapReadEXT |"; if ( value & AccessFlagBits2::eVideoEncodeReadKHR ) result += " VideoEncodeReadKHR |"; if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) @@ -2074,6 +2082,8 @@ namespace VULKAN_HPP_NAMESPACE if ( value & 
BufferUsageFlagBits2::eExecutionGraphScratchAMDX ) result += " ExecutionGraphScratchAMDX |"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2::eDescriptorHeapEXT ) + result += " DescriptorHeapEXT |"; if ( value & BufferUsageFlagBits2::eConditionalRenderingEXT ) result += " ConditionalRenderingEXT |"; if ( value & BufferUsageFlagBits2::eShaderBindingTableKHR ) @@ -2162,6 +2172,8 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineCreateFlagBits2::eExecutionGraphAMDX ) result += " ExecutionGraphAMDX |"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits2::eDescriptorHeapEXT ) + result += " DescriptorHeapEXT |"; if ( value & PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV ) result += " RayTracingAllowSpheresAndLinearSweptSpheresNV |"; if ( value & PipelineCreateFlagBits2::eEnableLegacyDitheringEXT ) @@ -3085,6 +3097,52 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } + //=== VK_EXT_descriptor_heap === + + VULKAN_HPP_INLINE std::string to_string( SpirvResourceTypeFlagsEXT value ) + { + std::string result = "{"; + if ( value & SpirvResourceTypeFlagBitsEXT::eSampler ) + result += " Sampler |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eSampledImage ) + result += " SampledImage |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eReadOnlyImage ) + result += " ReadOnlyImage |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eReadWriteImage ) + result += " ReadWriteImage |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eCombinedSampledImage ) + result += " CombinedSampledImage |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eUniformBuffer ) + result += " UniformBuffer |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eReadOnlyStorageBuffer ) + result += " ReadOnlyStorageBuffer |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eReadWriteStorageBuffer ) + result += " ReadWriteStorageBuffer |"; + if ( value & SpirvResourceTypeFlagBitsEXT::eAccelerationStructure ) + result += " AccelerationStructure |"; + 
if ( value & SpirvResourceTypeFlagBitsEXT::eTensorARM ) + result += " TensorARM |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( TensorViewCreateFlagsARM value ) + { + std::string result = "{"; + if ( value & TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay ) + result += " DescriptorBufferCaptureReplay |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + //=== VK_NV_fragment_coverage_to_color === VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) @@ -3819,6 +3877,8 @@ namespace VULKAN_HPP_NAMESPACE result += " MutableFormat |"; if ( value & TensorCreateFlagBitsARM::eProtected ) result += " Protected |"; + if ( value & TensorCreateFlagBitsARM::eDescriptorHeapCaptureReplay ) + result += " DescriptorHeapCaptureReplay |"; if ( value & TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay ) result += " DescriptorBufferCaptureReplay |"; @@ -3829,19 +3889,6 @@ namespace VULKAN_HPP_NAMESPACE return result; } - VULKAN_HPP_INLINE std::string to_string( TensorViewCreateFlagsARM value ) - { - std::string result = "{"; - if ( value & TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay ) - result += " DescriptorBufferCaptureReplay |"; - - if ( result.size() > 1 ) - result.back() = '}'; - else - result = "{}"; - return result; - } - VULKAN_HPP_INLINE std::string to_string( TensorUsageFlagsARM value ) { std::string result = "{"; @@ -3946,6 +3993,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result = "{"; if ( value & ShaderCreateFlagBitsEXT::eLinkStage ) result += " LinkStage |"; + if ( value & ShaderCreateFlagBitsEXT::eDescriptorHeap ) + result += " DescriptorHeap |"; if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize ) result += " AllowVaryingSubgroupSize |"; if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups ) @@ -4895,6 +4944,21 @@ 
namespace VULKAN_HPP_NAMESPACE case StructureType::eExecutionGraphPipelineCreateInfoAMDX : return "ExecutionGraphPipelineCreateInfoAMDX"; case StructureType::ePipelineShaderStageNodeCreateInfoAMDX : return "PipelineShaderStageNodeCreateInfoAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eTexelBufferDescriptorInfoEXT : return "TexelBufferDescriptorInfoEXT"; + case StructureType::eImageDescriptorInfoEXT : return "ImageDescriptorInfoEXT"; + case StructureType::eResourceDescriptorInfoEXT : return "ResourceDescriptorInfoEXT"; + case StructureType::eBindHeapInfoEXT : return "BindHeapInfoEXT"; + case StructureType::ePushDataInfoEXT : return "PushDataInfoEXT"; + case StructureType::eDescriptorSetAndBindingMappingEXT : return "DescriptorSetAndBindingMappingEXT"; + case StructureType::eShaderDescriptorSetAndBindingMappingInfoEXT : return "ShaderDescriptorSetAndBindingMappingInfoEXT"; + case StructureType::eOpaqueCaptureDataCreateInfoEXT : return "OpaqueCaptureDataCreateInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT : return "PhysicalDeviceDescriptorHeapPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT : return "PhysicalDeviceDescriptorHeapFeaturesEXT"; + case StructureType::eCommandBufferInheritanceDescriptorHeapInfoEXT : return "CommandBufferInheritanceDescriptorHeapInfoEXT"; + case StructureType::eSamplerCustomBorderColorIndexCreateInfoEXT : return "SamplerCustomBorderColorIndexCreateInfoEXT"; + case StructureType::eIndirectCommandsLayoutPushDataTokenNV : return "IndirectCommandsLayoutPushDataTokenNV"; + case StructureType::eSubsampledImageFormatPropertiesEXT : return "SubsampledImageFormatPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM : return "PhysicalDeviceDescriptorHeapTensorPropertiesARM"; case StructureType::eAttachmentSampleCountInfoAMD : return "AttachmentSampleCountInfoAMD"; case StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR : return 
"PhysicalDeviceShaderBfloat16FeaturesKHR"; case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT"; @@ -4942,39 +5006,40 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR"; case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: return "PhysicalDevicePortabilitySubsetPropertiesKHR"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; - case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; - case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; - case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; - case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; - case StructureType::eGeometryNV : return "GeometryNV"; - case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; - case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; - case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; - case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; - case StructureType::eAccelerationStructureInfoNV : return 
"AccelerationStructureInfoNV"; - case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; - case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; - case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; - case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; - case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; - case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; - case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; - case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR"; - case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; - case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; - case StructureType::eVideoDecodeH265CapabilitiesKHR : return "VideoDecodeH265CapabilitiesKHR"; - case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR : return "VideoDecodeH265SessionParametersCreateInfoKHR"; - case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR : return "VideoDecodeH265SessionParametersAddInfoKHR"; - case StructureType::eVideoDecodeH265ProfileInfoKHR : return "VideoDecodeH265ProfileInfoKHR"; - case StructureType::eVideoDecodeH265PictureInfoKHR : return "VideoDecodeH265PictureInfoKHR"; - case StructureType::eVideoDecodeH265DpbSlotInfoKHR : return "VideoDecodeH265DpbSlotInfoKHR"; - case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; - case 
StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV : return "GeometryNV"; + case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case 
StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM: return "PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM"; + case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::eVideoDecodeH265CapabilitiesKHR : return "VideoDecodeH265CapabilitiesKHR"; + case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR : return "VideoDecodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR : return "VideoDecodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH265ProfileInfoKHR : return "VideoDecodeH265ProfileInfoKHR"; + case StructureType::eVideoDecodeH265PictureInfoKHR : return "VideoDecodeH265PictureInfoKHR"; + case StructureType::eVideoDecodeH265DpbSlotInfoKHR : return "VideoDecodeH265DpbSlotInfoKHR"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; #if defined( VK_USE_PLATFORM_GGP ) case StructureType::ePresentFrameTokenGGP: return 
"PresentFrameTokenGGP"; #endif /*VK_USE_PLATFORM_GGP*/ @@ -5407,6 +5472,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM : return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM"; case StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT : return "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT"; case StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT : return "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT"; + case StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR : return "PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR"; case StructureType::eLatencySleepModeInfoNV : return "LatencySleepModeInfoNV"; case StructureType::eLatencySleepInfoNV : return "LatencySleepInfoNV"; case StructureType::eSetLatencyMarkerInfoNV : return "SetLatencyMarkerInfoNV"; @@ -5569,6 +5635,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA : return "PhysicalDeviceImageAlignmentControlPropertiesMESA"; case StructureType::eImageAlignmentControlCreateInfoMESA : return "ImageAlignmentControlCreateInfoMESA"; case StructureType::ePhysicalDeviceShaderFmaFeaturesKHR : return "PhysicalDeviceShaderFmaFeaturesKHR"; + case StructureType::ePushConstantBankInfoNV : return "PushConstantBankInfoNV"; + case StructureType::ePhysicalDevicePushConstantBankFeaturesNV : return "PhysicalDevicePushConstantBankFeaturesNV"; + case StructureType::ePhysicalDevicePushConstantBankPropertiesNV : return "PhysicalDevicePushConstantBankPropertiesNV"; case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT : return "PhysicalDeviceRayTracingInvocationReorderFeaturesEXT"; case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT : return "PhysicalDeviceRayTracingInvocationReorderPropertiesEXT"; case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT : return 
"PhysicalDeviceDepthClampControlFeaturesEXT"; @@ -5633,6 +5702,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT: return "PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT"; case StructureType::eComputeOccupancyPriorityParametersNV : return "ComputeOccupancyPriorityParametersNV"; case StructureType::ePhysicalDeviceComputeOccupancyPriorityFeaturesNV : return "PhysicalDeviceComputeOccupancyPriorityFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSubgroupPartitionedFeaturesEXT : return "PhysicalDeviceShaderSubgroupPartitionedFeaturesEXT"; default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; } } @@ -6083,9 +6153,9 @@ namespace VULKAN_HPP_NAMESPACE case ImageCreateFlagBits::eProtected : return "Protected"; case ImageCreateFlagBits::eDisjoint : return "Disjoint"; case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV"; + case ImageCreateFlagBits::eDescriptorHeapCaptureReplayEXT : return "DescriptorHeapCaptureReplayEXT"; case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT"; case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; - case ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT : return "DescriptorBufferCaptureReplayEXT"; case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT: return "MultisampledRenderToSingleSampledEXT"; case ImageCreateFlagBits::e2DViewCompatibleEXT : return "2DViewCompatibleEXT"; case ImageCreateFlagBits::eVideoProfileIndependentKHR : return "VideoProfileIndependentKHR"; @@ -6262,8 +6332,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case DeviceQueueCreateFlagBits::eProtected: return "Protected"; - default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; + case DeviceQueueCreateFlagBits::eProtected : return "Protected"; + case DeviceQueueCreateFlagBits::eInternallySynchronizedKHR: return "InternallySynchronizedKHR"; + 
default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; } } @@ -6449,6 +6520,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eDescriptorHeapEXT : return "DescriptorHeapEXT"; case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; case BufferUsageFlagBits::eAccelerationStructureStorageKHR : return "AccelerationStructureStorageKHR"; case BufferUsageFlagBits::eShaderBindingTableKHR : return "ShaderBindingTableKHR"; @@ -7565,7 +7637,7 @@ namespace VULKAN_HPP_NAMESPACE case SubgroupFeatureFlagBits::eQuad : return "Quad"; case SubgroupFeatureFlagBits::eRotate : return "Rotate"; case SubgroupFeatureFlagBits::eRotateClustered: return "RotateClustered"; - case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV"; + case SubgroupFeatureFlagBits::ePartitionedEXT : return "PartitionedEXT"; default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; } } @@ -7850,6 +7922,8 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits2::eShaderStorageWrite : return "ShaderStorageWrite"; case AccessFlagBits2::eVideoDecodeReadKHR : return "VideoDecodeReadKHR"; case AccessFlagBits2::eVideoDecodeWriteKHR : return "VideoDecodeWriteKHR"; + case AccessFlagBits2::eSamplerHeapReadEXT : return "SamplerHeapReadEXT"; + case AccessFlagBits2::eResourceHeapReadEXT : return "ResourceHeapReadEXT"; case AccessFlagBits2::eVideoEncodeReadKHR : return "VideoEncodeReadKHR"; case AccessFlagBits2::eVideoEncodeWriteKHR : return "VideoEncodeWriteKHR"; case AccessFlagBits2::eShaderTileAttachmentReadQCOM : return "ShaderTileAttachmentReadQCOM"; @@ -8020,6 +8094,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) case BufferUsageFlagBits2::eExecutionGraphScratchAMDX: return 
"ExecutionGraphScratchAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits2::eDescriptorHeapEXT : return "DescriptorHeapEXT"; case BufferUsageFlagBits2::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; case BufferUsageFlagBits2::eShaderBindingTableKHR : return "ShaderBindingTableKHR"; case BufferUsageFlagBits2::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; @@ -8071,6 +8146,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) case PipelineCreateFlagBits2::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits2::eDescriptorHeapEXT : return "DescriptorHeapEXT"; case PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV: return "RayTracingAllowSpheresAndLinearSweptSpheresNV"; case PipelineCreateFlagBits2::eEnableLegacyDitheringEXT : return "EnableLegacyDitheringEXT"; case PipelineCreateFlagBits2::eDeferCompileNV : return "DeferCompileNV"; @@ -8996,6 +9072,55 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + //=== VK_EXT_descriptor_heap === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorMappingSourceEXT value ) + { + switch ( value ) + { + case DescriptorMappingSourceEXT::eHeapWithConstantOffset : return "HeapWithConstantOffset"; + case DescriptorMappingSourceEXT::eHeapWithPushIndex : return "HeapWithPushIndex"; + case DescriptorMappingSourceEXT::eHeapWithIndirectIndex : return "HeapWithIndirectIndex"; + case DescriptorMappingSourceEXT::eHeapWithIndirectIndexArray: return "HeapWithIndirectIndexArray"; + case DescriptorMappingSourceEXT::eResourceHeapData : return "ResourceHeapData"; + case DescriptorMappingSourceEXT::ePushData : return "PushData"; + case DescriptorMappingSourceEXT::ePushAddress : return "PushAddress"; + case DescriptorMappingSourceEXT::eIndirectAddress : return "IndirectAddress"; + case DescriptorMappingSourceEXT::eHeapWithShaderRecordIndex : return 
"HeapWithShaderRecordIndex"; + case DescriptorMappingSourceEXT::eShaderRecordData : return "ShaderRecordData"; + case DescriptorMappingSourceEXT::eShaderRecordAddress : return "ShaderRecordAddress"; + default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SpirvResourceTypeFlagBitsEXT value ) + { + switch ( value ) + { + case SpirvResourceTypeFlagBitsEXT::eAll : return "All"; + case SpirvResourceTypeFlagBitsEXT::eSampler : return "Sampler"; + case SpirvResourceTypeFlagBitsEXT::eSampledImage : return "SampledImage"; + case SpirvResourceTypeFlagBitsEXT::eReadOnlyImage : return "ReadOnlyImage"; + case SpirvResourceTypeFlagBitsEXT::eReadWriteImage : return "ReadWriteImage"; + case SpirvResourceTypeFlagBitsEXT::eCombinedSampledImage : return "CombinedSampledImage"; + case SpirvResourceTypeFlagBitsEXT::eUniformBuffer : return "UniformBuffer"; + case SpirvResourceTypeFlagBitsEXT::eReadOnlyStorageBuffer : return "ReadOnlyStorageBuffer"; + case SpirvResourceTypeFlagBitsEXT::eReadWriteStorageBuffer: return "ReadWriteStorageBuffer"; + case SpirvResourceTypeFlagBitsEXT::eAccelerationStructure : return "AccelerationStructure"; + case SpirvResourceTypeFlagBitsEXT::eTensorARM : return "TensorARM"; + default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorViewCreateFlagBitsARM value ) + { + switch ( value ) + { + case TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay: return "DescriptorBufferCaptureReplay"; + default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_EXT_blend_operation_advanced === VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BlendOverlapEXT value ) @@ -9519,6 +9644,7 @@ namespace VULKAN_HPP_NAMESPACE case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed"; case 
IndirectCommandsTokenTypeNV::eDraw : return "Draw"; case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks"; + case IndirectCommandsTokenTypeNV::ePushData : return "PushData"; case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks"; case IndirectCommandsTokenTypeNV::ePipeline : return "Pipeline"; case IndirectCommandsTokenTypeNV::eDispatch : return "Dispatch"; @@ -10129,20 +10255,12 @@ namespace VULKAN_HPP_NAMESPACE { case TensorCreateFlagBitsARM::eMutableFormat : return "MutableFormat"; case TensorCreateFlagBitsARM::eProtected : return "Protected"; + case TensorCreateFlagBitsARM::eDescriptorHeapCaptureReplay : return "DescriptorHeapCaptureReplay"; case TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay: return "DescriptorBufferCaptureReplay"; default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorViewCreateFlagBitsARM value ) - { - switch ( value ) - { - case TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay: return "DescriptorBufferCaptureReplay"; - default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; - } - } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorUsageFlagBitsARM value ) { switch ( value ) @@ -10289,6 +10407,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case ShaderCreateFlagBitsEXT::eLinkStage : return "LinkStage"; + case ShaderCreateFlagBitsEXT::eDescriptorHeap : return "DescriptorHeap"; case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize : return "AllowVaryingSubgroupSize"; case ShaderCreateFlagBitsEXT::eRequireFullSubgroups : return "RequireFullSubgroups"; case ShaderCreateFlagBitsEXT::eNoTaskShader : return "NoTaskShader"; @@ -10814,22 +10933,24 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case IndirectCommandsTokenTypeEXT::eExecutionSet : return "ExecutionSet"; - case IndirectCommandsTokenTypeEXT::ePushConstant : return "PushConstant"; - 
case IndirectCommandsTokenTypeEXT::eSequenceIndex : return "SequenceIndex"; - case IndirectCommandsTokenTypeEXT::eIndexBuffer : return "IndexBuffer"; - case IndirectCommandsTokenTypeEXT::eVertexBuffer : return "VertexBuffer"; - case IndirectCommandsTokenTypeEXT::eDrawIndexed : return "DrawIndexed"; - case IndirectCommandsTokenTypeEXT::eDraw : return "Draw"; - case IndirectCommandsTokenTypeEXT::eDrawIndexedCount : return "DrawIndexedCount"; - case IndirectCommandsTokenTypeEXT::eDrawCount : return "DrawCount"; - case IndirectCommandsTokenTypeEXT::eDispatch : return "Dispatch"; - case IndirectCommandsTokenTypeEXT::eDrawMeshTasksNV : return "DrawMeshTasksNV"; - case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCountNV: return "DrawMeshTasksCountNV"; - case IndirectCommandsTokenTypeEXT::eDrawMeshTasks : return "DrawMeshTasks"; - case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCount : return "DrawMeshTasksCount"; - case IndirectCommandsTokenTypeEXT::eTraceRays2 : return "TraceRays2"; - default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; + case IndirectCommandsTokenTypeEXT::eExecutionSet : return "ExecutionSet"; + case IndirectCommandsTokenTypeEXT::ePushConstant : return "PushConstant"; + case IndirectCommandsTokenTypeEXT::eSequenceIndex : return "SequenceIndex"; + case IndirectCommandsTokenTypeEXT::eIndexBuffer : return "IndexBuffer"; + case IndirectCommandsTokenTypeEXT::eVertexBuffer : return "VertexBuffer"; + case IndirectCommandsTokenTypeEXT::eDrawIndexed : return "DrawIndexed"; + case IndirectCommandsTokenTypeEXT::eDraw : return "Draw"; + case IndirectCommandsTokenTypeEXT::eDrawIndexedCount : return "DrawIndexedCount"; + case IndirectCommandsTokenTypeEXT::eDrawCount : return "DrawCount"; + case IndirectCommandsTokenTypeEXT::eDispatch : return "Dispatch"; + case IndirectCommandsTokenTypeEXT::ePushData : return "PushData"; + case IndirectCommandsTokenTypeEXT::ePushDataSequenceIndex: return "PushDataSequenceIndex"; + case 
IndirectCommandsTokenTypeEXT::eDrawMeshTasksNV : return "DrawMeshTasksNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCountNV : return "DrawMeshTasksCountNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasks : return "DrawMeshTasks"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCount : return "DrawMeshTasksCount"; + case IndirectCommandsTokenTypeEXT::eTraceRays2 : return "TraceRays2"; + default : return "invalid ( " + toHexString( static_cast( value ) ) + " )"; } } diff --git a/third_party/vulkan/vulkan_video.cppm b/third_party/vulkan/vulkan_video.cppm index 7702ef2..57c4f19 100644 --- a/third_party/vulkan/vulkan_video.cppm +++ b/third_party/vulkan/vulkan_video.cppm @@ -25,7 +25,6 @@ export module vulkan:video; export namespace VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE { - //================= //=== CONSTANTs === //================= @@ -320,5 +319,4 @@ export namespace VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE using VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ReferenceInfo; using VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ReferenceInfoFlags; #endif - } // namespace VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE diff --git a/third_party/vulkan/vulkan_video.hpp b/third_party/vulkan/vulkan_video.hpp index a513d86..2794d0e 100644 --- a/third_party/vulkan/vulkan_video.hpp +++ b/third_party/vulkan/vulkan_video.hpp @@ -62,7 +62,6 @@ namespace VULKAN_HPP_NAMESPACE { namespace VULKAN_HPP_VIDEO_NAMESPACE { - //================= //=== CONSTANTs === //================= @@ -666,11 +665,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && - ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && - ( video_full_range_flag == 
rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) && - ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && - ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && + ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && + ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && + ( color_description_present_flag == rhs.color_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && + ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && + ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ); } @@ -721,12 +725,17 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) && - ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && - ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) && + return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && + ( bit_rate_scale == rhs.bit_rate_scale ) && + ( cpb_size_scale == rhs.cpb_size_scale ) && + ( reserved1 == rhs.reserved1 ) && + ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && + ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && + ( 
cbr_flag == rhs.cbr_flag ) && ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) && - ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length ); + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && + ( time_offset_length == rhs.time_offset_length ); } bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -774,12 +783,21 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && - ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && - ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) && - ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) && - ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && - ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + return ( flags == rhs.flags ) && + ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && + ( sar_width == rhs.sar_width ) && + ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && + ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && + ( matrix_coefficients == rhs.matrix_coefficients ) && + ( num_units_in_tick == rhs.num_units_in_tick ) && + ( time_scale == rhs.time_scale ) && + ( max_num_reorder_frames == rhs.max_num_reorder_frames ) && + ( 
max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && + ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && + ( reserved1 == rhs.reserved1 ) && ( pHrdParameters == rhs.pHrdParameters ); } @@ -834,15 +852,22 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) && - ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) && - ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) && - ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) && - ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) && + return ( constraint_set0_flag == rhs.constraint_set0_flag ) && + ( constraint_set1_flag == rhs.constraint_set1_flag ) && + ( constraint_set2_flag == rhs.constraint_set2_flag ) && + ( constraint_set3_flag == rhs.constraint_set3_flag ) && + ( constraint_set4_flag == rhs.constraint_set4_flag ) && + ( constraint_set5_flag == rhs.constraint_set5_flag ) && + ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && + ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) && + ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && + ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) && - ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) && - ( 
seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ); + ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && + ( frame_cropping_flag == rhs.frame_cropping_flag ) && + ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ); } bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -895,8 +920,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) && - ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ); + return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && + ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) && + ( ScalingList4x4 == rhs.ScalingList4x4 ) && + ( ScalingList8x8 == rhs.ScalingList8x8 ); } bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -937,18 +964,30 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) && - ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && - ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && - ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) && - ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) && + return ( flags == rhs.flags ) && + ( profile_idc == 
rhs.profile_idc ) && + ( level_idc == rhs.level_idc ) && + ( chroma_format_idc == rhs.chroma_format_idc ) && + ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && + ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && + ( pic_order_cnt_type == rhs.pic_order_cnt_type ) && + ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && + ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) && ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && - ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) && - ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) && - ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) && - ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) && - ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) && - ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) && + ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && + ( max_num_ref_frames == rhs.max_num_ref_frames ) && + ( reserved1 == rhs.reserved1 ) && + ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) && + ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && + ( frame_crop_left_offset == rhs.frame_crop_left_offset ) && + ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && + ( frame_crop_top_offset == rhs.frame_crop_top_offset ) && + ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && + ( reserved2 == rhs.reserved2 ) && + ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && 
+ ( pScalingLists == rhs.pScalingLists ) && ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ); } @@ -1013,11 +1052,14 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) && + return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && + ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && - ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) && - ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag ); + ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && + ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag ); } bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1062,11 +1104,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + return ( flags == rhs.flags ) && + ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && - ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == 
rhs.weighted_bipred_idc ) && - ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) && - ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && + ( weighted_bipred_idc == rhs.weighted_bipred_idc ) && + ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && + ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) && + ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && + ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) && ( pScalingLists == rhs.pScalingLists ); } @@ -1120,8 +1167,11 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) && - ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) && + return ( field_pic_flag == rhs.field_pic_flag ) && + ( is_intra == rhs.is_intra ) && + ( IdrPicFlag == rhs.IdrPicFlag ) && + ( bottom_field_flag == rhs.bottom_field_flag ) && + ( is_reference == rhs.is_reference ) && ( complementary_field_pair == rhs.complementary_field_pair ); } @@ -1165,8 +1215,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && - ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) && + return ( flags == rhs.flags ) && + ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( 
frame_num == rhs.frame_num ) && + ( idr_pic_id == rhs.idr_pic_id ) && ( PicOrderCnt == rhs.PicOrderCnt ); } @@ -1212,8 +1267,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) && - ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing ); + return ( top_field_flag == rhs.top_field_flag ) && + ( bottom_field_flag == rhs.bottom_field_flag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && + ( is_non_existing == rhs.is_non_existing ); } bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1299,8 +1356,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && - ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && + ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && + ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); } bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1341,10 +1400,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && - ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) && - ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) && - ( luma_weight_l1 == 
rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) && + return ( flags == rhs.flags ) && + ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && + ( luma_weight_l0 == rhs.luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && + ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && + ( chroma_offset_l0 == rhs.chroma_offset_l0 ) && + ( luma_weight_l1 == rhs.luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && + ( chroma_weight_l1 == rhs.chroma_weight_l1 ) && ( chroma_offset_l1 == rhs.chroma_offset_l1 ); } @@ -1394,7 +1459,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) && - ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved ); + ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1434,9 +1500,12 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) && - ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) && - ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved ); + return ( IdrPicFlag == rhs.IdrPicFlag ) && + ( is_reference == rhs.is_reference ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && + ( long_term_reference_flag == rhs.long_term_reference_flag ) && + ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && + ( reserved == 
rhs.reserved ); } bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1519,7 +1588,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && - ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1559,7 +1629,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) && + return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && + ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ); } @@ -1602,8 +1673,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( memory_management_control_operation == rhs.memory_management_control_operation ) && - ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ) && - ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 ); + ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && + ( long_term_frame_idx == rhs.long_term_frame_idx ) && + ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 ); } bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1646,11 +1719,17 @@ 
namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && - ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && - ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) && - ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) && - ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) && + return ( flags == rhs.flags ) && + ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && + ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && + ( refList0ModOpCount == rhs.refList0ModOpCount ) && + ( refList1ModOpCount == rhs.refList1ModOpCount ) && + ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && + ( reserved1 == rhs.reserved1 ) && + ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && + ( pRefList1ModOperations == rhs.pRefList1ModOperations ) && ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations ); } @@ -1700,9 +1779,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && - ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) && - ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ); + return ( flags == rhs.flags ) && + ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( 
pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( idr_pic_id == rhs.idr_pic_id ) && + ( primary_pic_type == rhs.primary_pic_type ) && + ( frame_num == rhs.frame_num ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && + ( temporal_id == rhs.temporal_id ) && + ( reserved1 == rhs.reserved1 ) && + ( pRefLists == rhs.pRefLists ); } bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1750,8 +1836,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) && - ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id ); + return ( flags == rhs.flags ) && + ( primary_pic_type == rhs.primary_pic_type ) && + ( FrameNum == rhs.FrameNum ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && + ( long_term_frame_idx == rhs.long_term_frame_idx ) && + ( temporal_id == rhs.temporal_id ); } bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1796,10 +1887,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) && - ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && - ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) && - ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable ); + return ( flags == rhs.flags ) && + ( first_mb_in_slice == rhs.first_mb_in_slice ) && + ( slice_type == rhs.slice_type ) && + ( 
slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( slice_qp_delta == rhs.slice_qp_delta ) && + ( reserved1 == rhs.reserved1 ) && + ( cabac_init_idc == rhs.cabac_init_idc ) && + ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && + ( pWeightTable == rhs.pWeightTable ); } bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -1852,7 +1949,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && + return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && + ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && ( max_num_reorder_pics == rhs.max_num_reorder_pics ); } @@ -1893,8 +1991,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && - ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) && + return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && + ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && + ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && + ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ); } @@ -1941,7 +2041,8 @@ namespace VULKAN_HPP_NAMESPACE ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) && ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) && ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) && - ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) 
&& ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) && + ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && + ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) && ( low_delay_hrd_flag == rhs.low_delay_hrd_flag ); } @@ -1986,15 +2087,21 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) && + return ( flags == rhs.flags ) && + ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) && ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) && - ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && - ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) && + ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && + ( bit_rate_scale == rhs.bit_rate_scale ) && + ( cpb_size_scale == rhs.cpb_size_scale ) && + ( cpb_size_du_scale == rhs.cpb_size_du_scale ) && ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) && - ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && - ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) && - ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl ); + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && + ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && + ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && + ( reserved == rhs.reserved ) && + ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && 
+ ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl ); } bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -2090,7 +2197,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && + return ( general_tier_flag == rhs.general_tier_flag ) && + ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) && ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) && ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag ); @@ -2176,11 +2284,18 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && - ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && - ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) && - ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) && - ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel ); + return ( flags == rhs.flags ) && + ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && + ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && + ( vps_time_scale == rhs.vps_time_scale ) && + ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && + ( reserved3 == rhs.reserved3 ) 
&& + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && + ( pHrdParameters == rhs.pHrdParameters ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ); } bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -2229,8 +2344,11 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) && - ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) && + return ( ScalingList4x4 == rhs.ScalingList4x4 ) && + ( ScalingList8x8 == rhs.ScalingList8x8 ) && + ( ScalingList16x16 == rhs.ScalingList16x16 ) && + ( ScalingList32x32 == rhs.ScalingList32x32 ) && + ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) && ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 ); } @@ -2274,15 +2392,21 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && - ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && - ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) && + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && + ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && + ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && + ( colour_description_present_flag == rhs.colour_description_present_flag ) && ( chroma_loc_info_present_flag == 
rhs.chroma_loc_info_present_flag ) && - ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) && - ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) && + ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && + ( field_seq_flag == rhs.field_seq_flag ) && + ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && + ( default_display_window_flag == rhs.default_display_window_flag ) && ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) && ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) && - ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && + ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) && ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) && ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag ); @@ -2340,18 +2464,31 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && - ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && - ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) && + return ( flags == rhs.flags ) && + ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && + ( sar_width == rhs.sar_width ) && + ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && + ( colour_primaries == 
rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && + ( matrix_coeffs == rhs.matrix_coeffs ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && - ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && - ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && - ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && - ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && - ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && - ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) && - ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && - ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && + ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && + ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && + ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && + ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && + ( vui_time_scale == rhs.vui_time_scale ) && + ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && + ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && + ( reserved3 == rhs.reserved3 ) && + ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && 
+ ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && + ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && + ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && ( pHrdParameters == rhs.pHrdParameters ); } @@ -2455,27 +2592,34 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && + ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && ( conformance_window_flag == rhs.conformance_window_flag ) && ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) && ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) && - ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) && - ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && + ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && + ( amp_enabled_flag == rhs.amp_enabled_flag ) && + ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && + ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) && ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) && ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) && ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) && - ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && + ( 
sps_extension_present_flag == rhs.sps_extension_present_flag ) && ( sps_range_extension_flag == rhs.sps_range_extension_flag ) && ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) && ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) && - ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && + ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && + ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) && ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) && ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) && ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) && - ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && - ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && + ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && + ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && + ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && + ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) && ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag ); } @@ -2583,12 +2727,20 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) && - ( 
abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && - ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && - ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) && - ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) && - ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); + return ( flags == rhs.flags ) && + ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && + ( use_delta_flag == rhs.use_delta_flag ) && + ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && + ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && + ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && + ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( reserved3 == rhs.reserved3 ) && + ( num_negative_pics == rhs.num_negative_pics ) && + ( num_positive_pics == rhs.num_positive_pics ) && + ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && + ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); } bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -2678,10 +2830,15 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && - ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && - ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && - ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == 
rhs.bit_depth_chroma_minus8 ) && + return ( flags == rhs.flags ) && + ( chroma_format_idc == rhs.chroma_format_idc ) && + ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && + ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && + ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && + ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && + ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) && ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) && @@ -2689,20 +2846,29 @@ namespace VULKAN_HPP_NAMESPACE ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) && ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) && ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) && - ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && + ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && + ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) && ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) && ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) && - ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) && - ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) && + ( 
log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( palette_max_size == rhs.palette_max_size ) && ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) && ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) && ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) && - ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) && - ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) && - ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) && - ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && - ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + ( conf_win_left_offset == rhs.conf_win_left_offset ) && + ( conf_win_right_offset == rhs.conf_win_right_offset ) && + ( conf_win_top_offset == rhs.conf_win_top_offset ) && + ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ) && + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && + ( pScalingLists == rhs.pScalingLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && + ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && + ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); } bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -2780,13 +2946,19 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( dependent_slice_segments_enabled_flag == 
rhs.dependent_slice_segments_enabled_flag ) && - ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && - ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && - ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && + ( output_flag_present_flag == rhs.output_flag_present_flag ) && + ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && + ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && + ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && + ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) && - ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && - ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && - ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && + ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && + ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && + ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && + ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && + ( uniform_spacing_flag == rhs.uniform_spacing_flag ) && ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) && ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) && ( deblocking_filter_control_present_flag == 
rhs.deblocking_filter_control_present_flag ) && @@ -2802,7 +2974,8 @@ namespace VULKAN_HPP_NAMESPACE ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) && ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) && ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) && - ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag ); + ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && + ( pps_range_extension_flag == rhs.pps_range_extension_flag ); } bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -2870,25 +3043,41 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && - ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + return ( flags == rhs.flags ) && + ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && + ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) && ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && - ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) && - ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && - ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && - ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && + ( 
num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && + ( init_qp_minus26 == rhs.init_qp_minus26 ) && + ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && + ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && + ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && + ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && + ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && + ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) && ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) && - ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && - ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && - ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && - ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && + ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && + ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && + ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && + ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && + ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && + ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && + ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && + ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) && - ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && - ( num_tile_columns_minus1 
== rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && - ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) && - ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) && + ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && + ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && + ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && + ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && + ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && + ( column_width_minus1 == rhs.column_width_minus1 ) && + ( row_height_minus1 == rhs.row_height_minus1 ) && + ( reserved3 == rhs.reserved3 ) && + ( pScalingLists == rhs.pScalingLists ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); } @@ -2966,7 +3155,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) && + return ( IrapPicFlag == rhs.IrapPicFlag ) && + ( IdrPicFlag == rhs.IdrPicFlag ) && + ( IsReference == rhs.IsReference ) && ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ); } @@ -3008,11 +3199,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && - ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && - ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && - ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) && - ( 
RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && + return ( flags == rhs.flags ) && + ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && + ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && + ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && + ( reserved == rhs.reserved ) && + ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && + ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && ( RefPicSetLtCurr == rhs.RefPicSetLtCurr ); } @@ -3143,8 +3339,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && - ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && + ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && + ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); } bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3185,11 +3383,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && - ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && - ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && - ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == 
rhs.delta_luma_weight_l1 ) && - ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && + return ( flags == rhs.flags ) && + ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && + ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && + ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && + ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && + ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && + ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 ); } @@ -3239,14 +3442,18 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) && - ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && - ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && - ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) && + ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && + ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && + ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && + ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && + ( cabac_init_flag == rhs.cabac_init_flag ) && ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) && ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) && ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) && ( 
collocated_from_l0_flag == rhs.collocated_from_l0_flag ) && - ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved ); + ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3296,12 +3503,20 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) && - ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && - ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) && - ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && - ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && - ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && + return ( flags == rhs.flags ) && + ( slice_type == rhs.slice_type ) && + ( slice_segment_address == rhs.slice_segment_address ) && + ( collocated_ref_idx == rhs.collocated_ref_idx ) && + ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && + ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && + ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && + ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && + ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && + ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && + ( slice_qp_delta == rhs.slice_qp_delta ) && + ( reserved1 == 
rhs.reserved1 ) && ( pWeightTable == rhs.pWeightTable ); } @@ -3355,7 +3570,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && - ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3395,9 +3611,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && - ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && - ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 ); + return ( flags == rhs.flags ) && + ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && + ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && + ( list_entry_l0 == rhs.list_entry_l0 ) && + ( list_entry_l1 == rhs.list_entry_l1 ); } bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3441,12 +3661,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) && - ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) && - ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) && + return 
( is_reference == rhs.is_reference ) && + ( IrapPicFlag == rhs.IrapPicFlag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && + ( discardable_flag == rhs.discardable_flag ) && + ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && + ( pic_output_flag == rhs.pic_output_flag ) && ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) && - ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved ); + ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3493,9 +3717,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) && - ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && - ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); + return ( num_long_term_sps == rhs.num_long_term_sps ) && + ( num_long_term_pics == rhs.num_long_term_pics ) && + ( lt_idx_sps == rhs.lt_idx_sps ) && + ( poc_lsb_lt == rhs.poc_lsb_lt ) && + ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && + ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && + ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); } bool operator!=( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3539,11 +3767,18 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == 
rhs.sps_video_parameter_set_id ) && - ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && - ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && - ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) && - ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics ); + return ( flags == rhs.flags ) && + ( pic_type == rhs.pic_type ) && + ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && + ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && + ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( TemporalId == rhs.TemporalId ) && + ( reserved1 == rhs.reserved1 ) && + ( pRefLists == rhs.pRefLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && + ( pLongTermRefPics == rhs.pLongTermRefPics ); } bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3592,7 +3827,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) && + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && + ( unused_for_reference == rhs.unused_for_reference ) && ( reserved == rhs.reserved ); } @@ -3717,8 +3953,12 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( VP9ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( BitDepth == rhs.BitDepth ) && ( subsampling_x == rhs.subsampling_x ) && ( subsampling_y == rhs.subsampling_y ) && - ( reserved1 == rhs.reserved1 ) && ( color_space == rhs.color_space ); + return ( flags == rhs.flags 
) && + ( BitDepth == rhs.BitDepth ) && + ( subsampling_x == rhs.subsampling_x ) && + ( subsampling_y == rhs.subsampling_y ) && + ( reserved1 == rhs.reserved1 ) && + ( color_space == rhs.color_space ); } bool operator!=( VP9ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3761,7 +4001,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( VP9LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && + return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && + ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && ( reserved == rhs.reserved ); } @@ -3802,9 +4043,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( VP9LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( loop_filter_level == rhs.loop_filter_level ) && ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && - ( update_ref_delta == rhs.update_ref_delta ) && ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && - ( update_mode_delta == rhs.update_mode_delta ) && ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); + return ( flags == rhs.flags ) && + ( loop_filter_level == rhs.loop_filter_level ) && + ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && + ( update_ref_delta == rhs.update_ref_delta ) && + ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && + ( update_mode_delta == rhs.update_mode_delta ) && + ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); } bool operator!=( VP9LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3848,8 +4093,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( VP9SegmentationFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( segmentation_update_map == rhs.segmentation_update_map ) && ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && - ( segmentation_update_data == rhs.segmentation_update_data ) && 
( segmentation_abs_or_delta_update == rhs.segmentation_abs_or_delta_update ) && + return ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && + ( segmentation_update_data == rhs.segmentation_update_data ) && + ( segmentation_abs_or_delta_update == rhs.segmentation_abs_or_delta_update ) && ( reserved == rhs.reserved ); } @@ -3892,8 +4139,11 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( VP9Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( segmentation_tree_probs == rhs.segmentation_tree_probs ) && - ( segmentation_pred_prob == rhs.segmentation_pred_prob ) && ( FeatureEnabled == rhs.FeatureEnabled ) && ( FeatureData == rhs.FeatureData ); + return ( flags == rhs.flags ) && + ( segmentation_tree_probs == rhs.segmentation_tree_probs ) && + ( segmentation_pred_prob == rhs.segmentation_pred_prob ) && + ( FeatureEnabled == rhs.FeatureEnabled ) && + ( FeatureData == rhs.FeatureData ); } bool operator!=( VP9Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3939,10 +4189,15 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeVP9PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( error_resilient_mode == rhs.error_resilient_mode ) && ( intra_only == rhs.intra_only ) && - ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && ( refresh_frame_context == rhs.refresh_frame_context ) && - ( frame_parallel_decoding_mode == rhs.frame_parallel_decoding_mode ) && ( segmentation_enabled == rhs.segmentation_enabled ) && - ( show_frame == rhs.show_frame ) && ( UsePrevFrameMvs == rhs.UsePrevFrameMvs ) && ( reserved == rhs.reserved ); + return ( error_resilient_mode == rhs.error_resilient_mode ) && + ( intra_only == rhs.intra_only ) && + ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( refresh_frame_context == rhs.refresh_frame_context ) && + ( frame_parallel_decoding_mode == rhs.frame_parallel_decoding_mode ) 
&& + ( segmentation_enabled == rhs.segmentation_enabled ) && + ( show_frame == rhs.show_frame ) && + ( UsePrevFrameMvs == rhs.UsePrevFrameMvs ) && + ( reserved == rhs.reserved ); } bool operator!=( DecodeVP9PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -3988,12 +4243,23 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeVP9PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( profile == rhs.profile ) && ( frame_type == rhs.frame_type ) && ( frame_context_idx == rhs.frame_context_idx ) && - ( reset_frame_context == rhs.reset_frame_context ) && ( refresh_frame_flags == rhs.refresh_frame_flags ) && - ( ref_frame_sign_bias_mask == rhs.ref_frame_sign_bias_mask ) && ( interpolation_filter == rhs.interpolation_filter ) && - ( base_q_idx == rhs.base_q_idx ) && ( delta_q_y_dc == rhs.delta_q_y_dc ) && ( delta_q_uv_dc == rhs.delta_q_uv_dc ) && - ( delta_q_uv_ac == rhs.delta_q_uv_ac ) && ( tile_cols_log2 == rhs.tile_cols_log2 ) && ( tile_rows_log2 == rhs.tile_rows_log2 ) && - ( reserved1 == rhs.reserved1 ) && ( pColorConfig == rhs.pColorConfig ) && ( pLoopFilter == rhs.pLoopFilter ) && + return ( flags == rhs.flags ) && + ( profile == rhs.profile ) && + ( frame_type == rhs.frame_type ) && + ( frame_context_idx == rhs.frame_context_idx ) && + ( reset_frame_context == rhs.reset_frame_context ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && + ( ref_frame_sign_bias_mask == rhs.ref_frame_sign_bias_mask ) && + ( interpolation_filter == rhs.interpolation_filter ) && + ( base_q_idx == rhs.base_q_idx ) && + ( delta_q_y_dc == rhs.delta_q_y_dc ) && + ( delta_q_uv_dc == rhs.delta_q_uv_dc ) && + ( delta_q_uv_ac == rhs.delta_q_uv_ac ) && + ( tile_cols_log2 == rhs.tile_cols_log2 ) && + ( tile_rows_log2 == rhs.tile_rows_log2 ) && + ( reserved1 == rhs.reserved1 ) && + ( pColorConfig == rhs.pColorConfig ) && + ( pLoopFilter == rhs.pLoopFilter ) && ( pSegmentation == rhs.pSegmentation ); } @@ -4054,8 +4320,11 @@ namespace 
VULKAN_HPP_NAMESPACE bool operator==( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( mono_chrome == rhs.mono_chrome ) && ( color_range == rhs.color_range ) && ( separate_uv_delta_q == rhs.separate_uv_delta_q ) && - ( color_description_present_flag == rhs.color_description_present_flag ) && ( reserved == rhs.reserved ); + return ( mono_chrome == rhs.mono_chrome ) && + ( color_range == rhs.color_range ) && + ( separate_uv_delta_q == rhs.separate_uv_delta_q ) && + ( color_description_present_flag == rhs.color_description_present_flag ) && + ( reserved == rhs.reserved ); } bool operator!=( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4097,9 +4366,15 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( BitDepth == rhs.BitDepth ) && ( subsampling_x == rhs.subsampling_x ) && ( subsampling_y == rhs.subsampling_y ) && - ( reserved1 == rhs.reserved1 ) && ( color_primaries == rhs.color_primaries ) && ( transfer_characteristics == rhs.transfer_characteristics ) && - ( matrix_coefficients == rhs.matrix_coefficients ) && ( chroma_sample_position == rhs.chroma_sample_position ); + return ( flags == rhs.flags ) && + ( BitDepth == rhs.BitDepth ) && + ( subsampling_x == rhs.subsampling_x ) && + ( subsampling_y == rhs.subsampling_y ) && + ( reserved1 == rhs.reserved1 ) && + ( color_primaries == rhs.color_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && + ( matrix_coefficients == rhs.matrix_coefficients ) && + ( chroma_sample_position == rhs.chroma_sample_position ); } bool operator!=( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4188,7 +4463,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1TimingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( num_units_in_display_tick == rhs.num_units_in_display_tick ) && ( time_scale == rhs.time_scale ) && + return ( flags == 
rhs.flags ) && + ( num_units_in_display_tick == rhs.num_units_in_display_tick ) && + ( time_scale == rhs.time_scale ) && ( num_ticks_per_picture_minus_1 == rhs.num_ticks_per_picture_minus_1 ); } @@ -4230,7 +4507,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && + return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && + ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && ( reserved == rhs.reserved ); } @@ -4271,9 +4549,13 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( loop_filter_level == rhs.loop_filter_level ) && ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && - ( update_ref_delta == rhs.update_ref_delta ) && ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && - ( update_mode_delta == rhs.update_mode_delta ) && ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); + return ( flags == rhs.flags ) && + ( loop_filter_level == rhs.loop_filter_level ) && + ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && + ( update_ref_delta == rhs.update_ref_delta ) && + ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && + ( update_mode_delta == rhs.update_mode_delta ) && + ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); } bool operator!=( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4357,9 +4639,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( base_q_idx == rhs.base_q_idx ) && ( DeltaQYDc == rhs.DeltaQYDc ) && ( DeltaQUDc == rhs.DeltaQUDc ) && - ( DeltaQUAc == rhs.DeltaQUAc ) && ( DeltaQVDc == rhs.DeltaQVDc ) && ( DeltaQVAc == rhs.DeltaQVAc ) && ( qm_y == rhs.qm_y ) && - ( qm_u == rhs.qm_u ) && ( 
qm_v == rhs.qm_v ); + return ( flags == rhs.flags ) && + ( base_q_idx == rhs.base_q_idx ) && + ( DeltaQYDc == rhs.DeltaQYDc ) && + ( DeltaQUDc == rhs.DeltaQUDc ) && + ( DeltaQUAc == rhs.DeltaQUAc ) && + ( DeltaQVDc == rhs.DeltaQVDc ) && + ( DeltaQVAc == rhs.DeltaQVAc ) && + ( qm_y == rhs.qm_y ) && + ( qm_u == rhs.qm_u ) && + ( qm_v == rhs.qm_v ); } bool operator!=( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4484,10 +4773,16 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( TileCols == rhs.TileCols ) && ( TileRows == rhs.TileRows ) && - ( context_update_tile_id == rhs.context_update_tile_id ) && ( tile_size_bytes_minus_1 == rhs.tile_size_bytes_minus_1 ) && - ( reserved1 == rhs.reserved1 ) && ( pMiColStarts == rhs.pMiColStarts ) && ( pMiRowStarts == rhs.pMiRowStarts ) && - ( pWidthInSbsMinus1 == rhs.pWidthInSbsMinus1 ) && ( pHeightInSbsMinus1 == rhs.pHeightInSbsMinus1 ); + return ( flags == rhs.flags ) && + ( TileCols == rhs.TileCols ) && + ( TileRows == rhs.TileRows ) && + ( context_update_tile_id == rhs.context_update_tile_id ) && + ( tile_size_bytes_minus_1 == rhs.tile_size_bytes_minus_1 ) && + ( reserved1 == rhs.reserved1 ) && + ( pMiColStarts == rhs.pMiColStarts ) && + ( pMiRowStarts == rhs.pMiRowStarts ) && + ( pWidthInSbsMinus1 == rhs.pWidthInSbsMinus1 ) && + ( pHeightInSbsMinus1 == rhs.pHeightInSbsMinus1 ); } bool operator!=( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4534,8 +4829,11 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1CDEF const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( cdef_damping_minus_3 == rhs.cdef_damping_minus_3 ) && ( cdef_bits == rhs.cdef_bits ) && ( cdef_y_pri_strength == rhs.cdef_y_pri_strength ) && - ( cdef_y_sec_strength == rhs.cdef_y_sec_strength ) && ( cdef_uv_pri_strength == rhs.cdef_uv_pri_strength ) && + return ( cdef_damping_minus_3 == rhs.cdef_damping_minus_3 ) && + ( cdef_bits == rhs.cdef_bits ) 
&& + ( cdef_y_pri_strength == rhs.cdef_y_pri_strength ) && + ( cdef_y_sec_strength == rhs.cdef_y_sec_strength ) && + ( cdef_uv_pri_strength == rhs.cdef_uv_pri_strength ) && ( cdef_uv_sec_strength == rhs.cdef_uv_sec_strength ); } @@ -4657,8 +4955,11 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( chroma_scaling_from_luma == rhs.chroma_scaling_from_luma ) && ( overlap_flag == rhs.overlap_flag ) && - ( clip_to_restricted_range == rhs.clip_to_restricted_range ) && ( update_grain == rhs.update_grain ) && ( reserved == rhs.reserved ); + return ( chroma_scaling_from_luma == rhs.chroma_scaling_from_luma ) && + ( overlap_flag == rhs.overlap_flag ) && + ( clip_to_restricted_range == rhs.clip_to_restricted_range ) && + ( update_grain == rhs.update_grain ) && + ( reserved == rhs.reserved ); } bool operator!=( AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4700,15 +5001,31 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( grain_scaling_minus_8 == rhs.grain_scaling_minus_8 ) && ( ar_coeff_lag == rhs.ar_coeff_lag ) && - ( ar_coeff_shift_minus_6 == rhs.ar_coeff_shift_minus_6 ) && ( grain_scale_shift == rhs.grain_scale_shift ) && ( grain_seed == rhs.grain_seed ) && - ( film_grain_params_ref_idx == rhs.film_grain_params_ref_idx ) && ( num_y_points == rhs.num_y_points ) && - ( point_y_value == rhs.point_y_value ) && ( point_y_scaling == rhs.point_y_scaling ) && ( num_cb_points == rhs.num_cb_points ) && - ( point_cb_value == rhs.point_cb_value ) && ( point_cb_scaling == rhs.point_cb_scaling ) && ( num_cr_points == rhs.num_cr_points ) && - ( point_cr_value == rhs.point_cr_value ) && ( point_cr_scaling == rhs.point_cr_scaling ) && - ( ar_coeffs_y_plus_128 == rhs.ar_coeffs_y_plus_128 ) && ( ar_coeffs_cb_plus_128 == rhs.ar_coeffs_cb_plus_128 ) && - ( ar_coeffs_cr_plus_128 == rhs.ar_coeffs_cr_plus_128 ) && ( 
cb_mult == rhs.cb_mult ) && ( cb_luma_mult == rhs.cb_luma_mult ) && - ( cb_offset == rhs.cb_offset ) && ( cr_mult == rhs.cr_mult ) && ( cr_luma_mult == rhs.cr_luma_mult ) && ( cr_offset == rhs.cr_offset ); + return ( flags == rhs.flags ) && + ( grain_scaling_minus_8 == rhs.grain_scaling_minus_8 ) && + ( ar_coeff_lag == rhs.ar_coeff_lag ) && + ( ar_coeff_shift_minus_6 == rhs.ar_coeff_shift_minus_6 ) && + ( grain_scale_shift == rhs.grain_scale_shift ) && + ( grain_seed == rhs.grain_seed ) && + ( film_grain_params_ref_idx == rhs.film_grain_params_ref_idx ) && + ( num_y_points == rhs.num_y_points ) && + ( point_y_value == rhs.point_y_value ) && + ( point_y_scaling == rhs.point_y_scaling ) && + ( num_cb_points == rhs.num_cb_points ) && + ( point_cb_value == rhs.point_cb_value ) && + ( point_cb_scaling == rhs.point_cb_scaling ) && + ( num_cr_points == rhs.num_cr_points ) && + ( point_cr_value == rhs.point_cr_value ) && + ( point_cr_scaling == rhs.point_cr_scaling ) && + ( ar_coeffs_y_plus_128 == rhs.ar_coeffs_y_plus_128 ) && + ( ar_coeffs_cb_plus_128 == rhs.ar_coeffs_cb_plus_128 ) && + ( ar_coeffs_cr_plus_128 == rhs.ar_coeffs_cr_plus_128 ) && + ( cb_mult == rhs.cb_mult ) && + ( cb_luma_mult == rhs.cb_luma_mult ) && + ( cb_offset == rhs.cb_offset ) && + ( cr_mult == rhs.cr_mult ) && + ( cr_luma_mult == rhs.cr_luma_mult ) && + ( cr_offset == rhs.cr_offset ); } bool operator!=( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4770,16 +5087,26 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1SequenceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( still_picture == rhs.still_picture ) && ( reduced_still_picture_header == rhs.reduced_still_picture_header ) && - ( use_128x128_superblock == rhs.use_128x128_superblock ) && ( enable_filter_intra == rhs.enable_filter_intra ) && - ( enable_intra_edge_filter == rhs.enable_intra_edge_filter ) && ( enable_interintra_compound == rhs.enable_interintra_compound ) && - ( enable_masked_compound == 
rhs.enable_masked_compound ) && ( enable_warped_motion == rhs.enable_warped_motion ) && - ( enable_dual_filter == rhs.enable_dual_filter ) && ( enable_order_hint == rhs.enable_order_hint ) && - ( enable_jnt_comp == rhs.enable_jnt_comp ) && ( enable_ref_frame_mvs == rhs.enable_ref_frame_mvs ) && - ( frame_id_numbers_present_flag == rhs.frame_id_numbers_present_flag ) && ( enable_superres == rhs.enable_superres ) && - ( enable_cdef == rhs.enable_cdef ) && ( enable_restoration == rhs.enable_restoration ) && - ( film_grain_params_present == rhs.film_grain_params_present ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && - ( initial_display_delay_present_flag == rhs.initial_display_delay_present_flag ) && ( reserved == rhs.reserved ); + return ( still_picture == rhs.still_picture ) && + ( reduced_still_picture_header == rhs.reduced_still_picture_header ) && + ( use_128x128_superblock == rhs.use_128x128_superblock ) && + ( enable_filter_intra == rhs.enable_filter_intra ) && + ( enable_intra_edge_filter == rhs.enable_intra_edge_filter ) && + ( enable_interintra_compound == rhs.enable_interintra_compound ) && + ( enable_masked_compound == rhs.enable_masked_compound ) && + ( enable_warped_motion == rhs.enable_warped_motion ) && + ( enable_dual_filter == rhs.enable_dual_filter ) && + ( enable_order_hint == rhs.enable_order_hint ) && + ( enable_jnt_comp == rhs.enable_jnt_comp ) && + ( enable_ref_frame_mvs == rhs.enable_ref_frame_mvs ) && + ( frame_id_numbers_present_flag == rhs.frame_id_numbers_present_flag ) && + ( enable_superres == rhs.enable_superres ) && + ( enable_cdef == rhs.enable_cdef ) && + ( enable_restoration == rhs.enable_restoration ) && + ( film_grain_params_present == rhs.film_grain_params_present ) && + ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( initial_display_delay_present_flag == rhs.initial_display_delay_present_flag ) && + ( reserved == rhs.reserved ); } bool operator!=( AV1SequenceHeaderFlags const & rhs ) 
const VULKAN_HPP_NOEXCEPT @@ -4836,12 +5163,20 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( seq_profile == rhs.seq_profile ) && ( frame_width_bits_minus_1 == rhs.frame_width_bits_minus_1 ) && - ( frame_height_bits_minus_1 == rhs.frame_height_bits_minus_1 ) && ( max_frame_width_minus_1 == rhs.max_frame_width_minus_1 ) && - ( max_frame_height_minus_1 == rhs.max_frame_height_minus_1 ) && ( delta_frame_id_length_minus_2 == rhs.delta_frame_id_length_minus_2 ) && - ( additional_frame_id_length_minus_1 == rhs.additional_frame_id_length_minus_1 ) && ( order_hint_bits_minus_1 == rhs.order_hint_bits_minus_1 ) && - ( seq_force_integer_mv == rhs.seq_force_integer_mv ) && ( seq_force_screen_content_tools == rhs.seq_force_screen_content_tools ) && - ( reserved1 == rhs.reserved1 ) && ( pColorConfig == rhs.pColorConfig ) && ( pTimingInfo == rhs.pTimingInfo ); + return ( flags == rhs.flags ) && + ( seq_profile == rhs.seq_profile ) && + ( frame_width_bits_minus_1 == rhs.frame_width_bits_minus_1 ) && + ( frame_height_bits_minus_1 == rhs.frame_height_bits_minus_1 ) && + ( max_frame_width_minus_1 == rhs.max_frame_width_minus_1 ) && + ( max_frame_height_minus_1 == rhs.max_frame_height_minus_1 ) && + ( delta_frame_id_length_minus_2 == rhs.delta_frame_id_length_minus_2 ) && + ( additional_frame_id_length_minus_1 == rhs.additional_frame_id_length_minus_1 ) && + ( order_hint_bits_minus_1 == rhs.order_hint_bits_minus_1 ) && + ( seq_force_integer_mv == rhs.seq_force_integer_mv ) && + ( seq_force_screen_content_tools == rhs.seq_force_screen_content_tools ) && + ( reserved1 == rhs.reserved1 ) && + ( pColorConfig == rhs.pColorConfig ) && + ( pTimingInfo == rhs.pTimingInfo ); } bool operator!=( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4896,19 +5231,36 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - 
return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && - ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && - ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && - ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && - ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && - ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && - ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && - ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && - ( reduced_tx_set == rhs.reduced_tx_set ) && ( reference_select == rhs.reference_select ) && ( skip_mode_present == rhs.skip_mode_present ) && - ( delta_q_present == rhs.delta_q_present ) && ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && - ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && - ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && - ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( apply_grain == rhs.apply_grain ) && ( reserved == rhs.reserved ); + return ( error_resilient_mode == rhs.error_resilient_mode ) && + ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && + ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && + ( is_filter_switchable == 
rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && + ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && + ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && + ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && + ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && + ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && + ( reference_select == rhs.reference_select ) && + ( skip_mode_present == rhs.skip_mode_present ) && + ( delta_q_present == rhs.delta_q_present ) && + ( delta_lf_present == rhs.delta_lf_present ) && + ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && + ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && + ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && + ( usesChromaLr == rhs.usesChromaLr ) && + ( apply_grain == rhs.apply_grain ) && + ( reserved == rhs.reserved ); } bool operator!=( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -4975,14 +5327,30 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( current_frame_id == rhs.current_frame_id ) && ( OrderHint == rhs.OrderHint ) && - ( primary_ref_frame == rhs.primary_ref_frame ) && ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( reserved1 == rhs.reserved1 ) && - ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && - ( 
delta_lf_res == rhs.delta_lf_res ) && ( SkipModeFrame == rhs.SkipModeFrame ) && ( coded_denom == rhs.coded_denom ) && - ( reserved2 == rhs.reserved2 ) && ( OrderHints == rhs.OrderHints ) && ( expectedFrameId == rhs.expectedFrameId ) && - ( pTileInfo == rhs.pTileInfo ) && ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && - ( pLoopFilter == rhs.pLoopFilter ) && ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && - ( pGlobalMotion == rhs.pGlobalMotion ) && ( pFilmGrain == rhs.pFilmGrain ); + return ( flags == rhs.flags ) && + ( frame_type == rhs.frame_type ) && + ( current_frame_id == rhs.current_frame_id ) && + ( OrderHint == rhs.OrderHint ) && + ( primary_ref_frame == rhs.primary_ref_frame ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && + ( reserved1 == rhs.reserved1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && + ( TxMode == rhs.TxMode ) && + ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && + ( SkipModeFrame == rhs.SkipModeFrame ) && + ( coded_denom == rhs.coded_denom ) && + ( reserved2 == rhs.reserved2 ) && + ( OrderHints == rhs.OrderHints ) && + ( expectedFrameId == rhs.expectedFrameId ) && + ( pTileInfo == rhs.pTileInfo ) && + ( pQuantization == rhs.pQuantization ) && + ( pSegmentation == rhs.pSegmentation ) && + ( pLoopFilter == rhs.pLoopFilter ) && + ( pCDEF == rhs.pCDEF ) && + ( pLoopRestoration == rhs.pLoopRestoration ) && + ( pGlobalMotion == rhs.pGlobalMotion ) && + ( pFilmGrain == rhs.pFilmGrain ); } bool operator!=( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -5044,7 +5412,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && + ( 
segmentation_enabled == rhs.segmentation_enabled ) && ( reserved == rhs.reserved ); } @@ -5085,7 +5454,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( DecodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( RefFrameSignBias == rhs.RefFrameSignBias ) && ( OrderHint == rhs.OrderHint ) && + return ( flags == rhs.flags ) && + ( frame_type == rhs.frame_type ) && + ( RefFrameSignBias == rhs.RefFrameSignBias ) && + ( OrderHint == rhs.OrderHint ) && ( SavedOrderHints == rhs.SavedOrderHints ); } @@ -5134,7 +5506,8 @@ namespace VULKAN_HPP_NAMESPACE { return ( buffer_delay_length_minus_1 == rhs.buffer_delay_length_minus_1 ) && ( buffer_removal_time_length_minus_1 == rhs.buffer_removal_time_length_minus_1 ) && - ( frame_presentation_time_length_minus_1 == rhs.frame_presentation_time_length_minus_1 ) && ( reserved1 == rhs.reserved1 ) && + ( frame_presentation_time_length_minus_1 == rhs.frame_presentation_time_length_minus_1 ) && + ( reserved1 == rhs.reserved1 ) && ( num_units_in_decoding_tick == rhs.num_units_in_decoding_tick ); } @@ -5216,8 +5589,10 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( decoder_model_present_for_this_op == rhs.decoder_model_present_for_this_op ) && ( low_delay_mode_flag == rhs.low_delay_mode_flag ) && - ( initial_display_delay_present_for_this_op == rhs.initial_display_delay_present_for_this_op ) && ( reserved == rhs.reserved ); + return ( decoder_model_present_for_this_op == rhs.decoder_model_present_for_this_op ) && + ( low_delay_mode_flag == rhs.low_delay_mode_flag ) && + ( initial_display_delay_present_for_this_op == rhs.initial_display_delay_present_for_this_op ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -5258,8 +5633,12 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( 
EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( operating_point_idc == rhs.operating_point_idc ) && ( seq_level_idx == rhs.seq_level_idx ) && - ( seq_tier == rhs.seq_tier ) && ( decoder_buffer_delay == rhs.decoder_buffer_delay ) && ( encoder_buffer_delay == rhs.encoder_buffer_delay ) && + return ( flags == rhs.flags ) && + ( operating_point_idc == rhs.operating_point_idc ) && + ( seq_level_idx == rhs.seq_level_idx ) && + ( seq_tier == rhs.seq_tier ) && + ( decoder_buffer_delay == rhs.decoder_buffer_delay ) && + ( encoder_buffer_delay == rhs.encoder_buffer_delay ) && ( initial_display_delay_minus_1 == rhs.initial_display_delay_minus_1 ); } @@ -5304,20 +5683,36 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && - ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && - ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && - ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && - ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && - ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && - ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && - ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && - ( reduced_tx_set == rhs.reduced_tx_set ) && ( skip_mode_present == rhs.skip_mode_present ) && ( delta_q_present == rhs.delta_q_present ) && - ( delta_lf_present == 
rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && - ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && - ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && - ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( show_frame == rhs.show_frame ) && - ( showable_frame == rhs.showable_frame ) && ( reserved == rhs.reserved ); + return ( error_resilient_mode == rhs.error_resilient_mode ) && + ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && + ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && + ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && + ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && + ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && + ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && + ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && + ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && + ( skip_mode_present == rhs.skip_mode_present ) && + ( delta_q_present == rhs.delta_q_present ) && + ( delta_lf_present == rhs.delta_lf_present ) && + ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && + ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && + ( segmentation_update_data == 
rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && + ( usesChromaLr == rhs.usesChromaLr ) && + ( show_frame == rhs.show_frame ) && + ( showable_frame == rhs.showable_frame ) && + ( reserved == rhs.reserved ); } bool operator!=( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -5384,16 +5779,33 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( frame_presentation_time == rhs.frame_presentation_time ) && - ( current_frame_id == rhs.current_frame_id ) && ( order_hint == rhs.order_hint ) && ( primary_ref_frame == rhs.primary_ref_frame ) && - ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( coded_denom == rhs.coded_denom ) && - ( render_width_minus_1 == rhs.render_width_minus_1 ) && ( render_height_minus_1 == rhs.render_height_minus_1 ) && - ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && - ( delta_lf_res == rhs.delta_lf_res ) && ( ref_order_hint == rhs.ref_order_hint ) && ( ref_frame_idx == rhs.ref_frame_idx ) && - ( reserved1 == rhs.reserved1 ) && ( delta_frame_id_minus_1 == rhs.delta_frame_id_minus_1 ) && ( pTileInfo == rhs.pTileInfo ) && - ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && ( pLoopFilter == rhs.pLoopFilter ) && - ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && ( pGlobalMotion == rhs.pGlobalMotion ) && - ( pExtensionHeader == rhs.pExtensionHeader ) && ( pBufferRemovalTimes == rhs.pBufferRemovalTimes ); + return ( flags == rhs.flags ) && + ( frame_type == rhs.frame_type ) && + ( frame_presentation_time == rhs.frame_presentation_time ) && + ( current_frame_id == rhs.current_frame_id ) && + ( order_hint == rhs.order_hint ) && + ( primary_ref_frame == rhs.primary_ref_frame ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && + ( coded_denom == 
rhs.coded_denom ) && + ( render_width_minus_1 == rhs.render_width_minus_1 ) && + ( render_height_minus_1 == rhs.render_height_minus_1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && + ( TxMode == rhs.TxMode ) && + ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && + ( ref_order_hint == rhs.ref_order_hint ) && + ( ref_frame_idx == rhs.ref_frame_idx ) && + ( reserved1 == rhs.reserved1 ) && + ( delta_frame_id_minus_1 == rhs.delta_frame_id_minus_1 ) && + ( pTileInfo == rhs.pTileInfo ) && + ( pQuantization == rhs.pQuantization ) && + ( pSegmentation == rhs.pSegmentation ) && + ( pLoopFilter == rhs.pLoopFilter ) && + ( pCDEF == rhs.pCDEF ) && + ( pLoopRestoration == rhs.pLoopRestoration ) && + ( pGlobalMotion == rhs.pGlobalMotion ) && + ( pExtensionHeader == rhs.pExtensionHeader ) && + ( pBufferRemovalTimes == rhs.pBufferRemovalTimes ); } bool operator!=( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -5458,7 +5870,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( reserved == rhs.reserved ); } @@ -5499,8 +5912,12 @@ namespace VULKAN_HPP_NAMESPACE bool operator==( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) && ( RefFrameId == rhs.RefFrameId ) && ( frame_type == rhs.frame_type ) && ( OrderHint == rhs.OrderHint ) && - ( reserved1 == rhs.reserved1 ) && ( pExtensionHeader == rhs.pExtensionHeader ); + return ( flags == rhs.flags ) && + ( RefFrameId == rhs.RefFrameId ) && + ( frame_type == rhs.frame_type ) && + ( OrderHint == rhs.OrderHint ) && + ( reserved1 == rhs.reserved1 ) && + ( pExtensionHeader == 
rhs.pExtensionHeader ); } bool operator!=( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT @@ -5517,7 +5934,6 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; }; #endif - } // namespace VULKAN_HPP_VIDEO_NAMESPACE } // namespace VULKAN_HPP_NAMESPACE